Posted to commits@lucene.apache.org by ab...@apache.org on 2017/04/20 10:20:34 UTC

[01/23] lucene-solr:feature/autoscaling: Whitespace and spurious import.

Repository: lucene-solr
Updated Branches:
  refs/heads/feature/autoscaling 81e0f801f -> c9c29379a


Whitespace and spurious import.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c9c29379
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c9c29379
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c9c29379

Branch: refs/heads/feature/autoscaling
Commit: c9c29379a73bdeb01aaa1fea449fb91bc3004f9f
Parents: d8df9f8
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Thu Apr 20 12:18:41 2017 +0200
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Thu Apr 20 12:19:14 2017 +0200

----------------------------------------------------------------------
 solr/CHANGES.txt                                               | 6 +++---
 .../src/java/org/apache/solr/spelling/SpellCheckCollator.java  | 2 --
 2 files changed, 3 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c9c29379/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 73c8ef9..16bb70f 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -75,11 +75,11 @@ New Features
   tuples, one for each value in the multi-valued field. (Dennis Gove)
 
 * SOLR-10339: New set-trigger and remove-trigger APIs for autoscaling. (shalin)
- 
+
 * SOLR-10340: New set-listener and remove-listener API for autoscaling. (shalin)
- 
+
 * SOLR-10358: New suspend-trigger and resume-trigger APIs for autoscaling. (shalin)
- 
+
 * SOLR-10356: Adds basic math Streaming Evaluators (Dennis Gove)
 
 * SOLR-10393: Adds UUID Streaming Evaluator (Dennis Gove)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c9c29379/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java b/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
index 75917d0..12369f7 100644
--- a/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
+++ b/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
@@ -43,8 +43,6 @@ import org.apache.solr.search.SolrIndexSearcher;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.solr.common.params.CommonParams.ID;
-
 public class SpellCheckCollator {
   private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private int maxCollations = 1;


[05/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CalculatorStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CalculatorStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CalculatorStream.java
new file mode 100644
index 0000000..49a0809
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CalculatorStream.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.comp.StreamComparator;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
+import org.apache.solr.client.solrj.io.stream.expr.Expressible;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class CalculatorStream extends TupleStream implements Expressible {
+
+  private static final long serialVersionUID = 1;
+  private boolean finished;
+
+
+
+  public CalculatorStream() throws IOException {
+  }
+
+  public CalculatorStream(StreamExpression expression, StreamFactory factory) throws IOException {
+  }
+
+
+  @Override
+  public StreamExpression toExpression(StreamFactory factory) throws IOException{
+    return toExpression(factory, true);
+  }
+
+  private StreamExpression toExpression(StreamFactory factory, boolean includeStreams) throws IOException {
+    // function name
+    StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
+
+    return expression;
+  }
+
+  @Override
+  public Explanation toExplanation(StreamFactory factory) throws IOException {
+
+    return new StreamExplanation(getStreamNodeId().toString())
+        .withFunctionName(factory.getFunctionName(this.getClass()))
+        .withImplementingClass(this.getClass().getName())
+        .withExpressionType(ExpressionType.STREAM_DECORATOR)
+        .withExpression(toExpression(factory, false).toString());
+  }
+
+  public void setStreamContext(StreamContext context) {
+  }
+
+  public List<TupleStream> children() {
+    List<TupleStream> l =  new ArrayList<TupleStream>();
+    return l;
+  }
+
+  public void open() throws IOException {
+
+  }
+
+  public void close() throws IOException {
+  }
+
+  public Tuple read() throws IOException {
+
+    if(finished) {
+      HashMap m = new HashMap();
+      m.put("EOF", true);
+      Tuple tuple = new Tuple(m);
+      return tuple;
+    } else {
+      HashMap m = new HashMap();
+      Tuple tuple = new Tuple(m);
+      finished = true;
+      return tuple;
+    }
+  }
+
+  /** Return the stream sort - ie, the order in which records are returned */
+  public StreamComparator getStreamSort(){
+    return null;
+  }
+
+  public int getCost() {
+    return 0;
+  }
+
+
+}
\ No newline at end of file
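
For context, a minimal consumption sketch (not part of the commit): CalculatorStream implements the standard TupleStream contract and emits exactly one empty tuple followed by an EOF marker tuple.

import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.CalculatorStream;
import org.apache.solr.client.solrj.io.stream.TupleStream;

public class CalculatorStreamSketch {
  public static void main(String[] args) throws Exception {
    TupleStream stream = new CalculatorStream();
    stream.open();                   // a no-op here, but part of the contract
    try {
      Tuple tuple = stream.read();   // the single empty tuple
      while (!tuple.EOF) {
        System.out.println(tuple.fields);
        tuple = stream.read();       // the second read() returns the EOF tuple
      }
    } finally {
      stream.close();
    }
  }
}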

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
index 1acd79d..6d1764a 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
@@ -26,8 +26,6 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Random;
-import java.util.Set;
 import java.util.TreeSet;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
@@ -35,7 +33,6 @@ import java.util.concurrent.Future;
 import java.util.stream.Collectors;
 
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.client.solrj.impl.CloudSolrClient.Builder;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.comp.ComparatorOrder;
 import org.apache.solr.client.solrj.io.comp.FieldComparator;
@@ -52,9 +49,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.apache.solr.common.cloud.Aliases;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.ZkCoreNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
@@ -80,7 +75,7 @@ public class CloudSolrStream extends TupleStream implements Expressible {
   protected String zkHost;
   protected String collection;
   protected SolrParams params;
-  private Map<String, String> fieldMappings;
+  protected Map<String, String> fieldMappings;
   protected StreamComparator comp;
   private boolean trace;
   protected transient Map<String, Tuple> eofTuples;
@@ -178,9 +173,11 @@ public class CloudSolrStream extends TupleStream implements Expressible {
     else if(zkHostExpression.getParameter() instanceof StreamExpressionValue){
       zkHost = ((StreamExpressionValue)zkHostExpression.getParameter()).getValue();
     }
+    /*
     if(null == zkHost){
       throw new IOException(String.format(Locale.ROOT,"invalid expression %s - zkHost not found for collection '%s'",expression,collectionName));
     }
+    */
     
     // We've got all the required items
     init(collectionName, zkHost, mParams);
@@ -191,7 +188,7 @@ public class CloudSolrStream extends TupleStream implements Expressible {
     // functionName(collectionName, param1, param2, ..., paramN, sort="comp", [aliases="field=alias,..."])
     
     // function name
-    StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
+    StreamExpression expression = new StreamExpression(factory.getFunctionName(getClass()));
     
     // collection
     expression.addParameter(collection);
@@ -254,7 +251,7 @@ public class CloudSolrStream extends TupleStream implements Expressible {
     return explanation;
   }
 
-  private void init(String collectionName, String zkHost, SolrParams params) throws IOException {
+  protected void init(String collectionName, String zkHost, SolrParams params) throws IOException {
     this.zkHost = zkHost;
     this.collection = collectionName;
     this.params = new ModifiableSolrParams(params);
@@ -299,14 +296,6 @@ public class CloudSolrStream extends TupleStream implements Expressible {
     this.tuples = new TreeSet();
     this.solrStreams = new ArrayList();
     this.eofTuples = Collections.synchronizedMap(new HashMap());
-    if (this.streamContext != null && this.streamContext.getSolrClientCache() != null) {
-      this.cloudSolrClient = this.streamContext.getSolrClientCache().getCloudSolrClient(zkHost);
-    } else {
-      this.cloudSolrClient = new Builder()
-          .withZkHost(zkHost)
-          .build();
-      this.cloudSolrClient.connect();
-    }
     constructStreams();
     openStreams();
   }
@@ -400,28 +389,15 @@ public class CloudSolrStream extends TupleStream implements Expressible {
 
   protected void constructStreams() throws IOException {
     try {
-      ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader();
-      ClusterState clusterState = zkStateReader.getClusterState();
 
-      Collection<Slice> slices = CloudSolrStream.getSlices(this.collection, zkStateReader, true);
+      List<String> shardUrls = getShards(this.zkHost, this.collection, this.streamContext);
 
-      ModifiableSolrParams mParams = new ModifiableSolrParams(params); 
+      ModifiableSolrParams mParams = new ModifiableSolrParams(params);
+      mParams = adjustParams(mParams);
       mParams.set(DISTRIB, "false"); // We are the aggregator.
 
-      Set<String> liveNodes = clusterState.getLiveNodes();
-      for(Slice slice : slices) {
-        Collection<Replica> replicas = slice.getReplicas();
-        List<Replica> shuffler = new ArrayList<>();
-        for(Replica replica : replicas) {
-          if(replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName()))
-          shuffler.add(replica);
-        }
-
-        Collections.shuffle(shuffler, new Random());
-        Replica rep = shuffler.get(0);
-        ZkCoreNodeProps zkProps = new ZkCoreNodeProps(rep);
-        String url = zkProps.getCoreUrl();
-        SolrStream solrStream = new SolrStream(url, mParams);
+      for(String shardUrl : shardUrls) {
+        SolrStream solrStream = new SolrStream(shardUrl, mParams);
         if(streamContext != null) {
           solrStream.setStreamContext(streamContext);
         }
@@ -467,12 +443,6 @@ public class CloudSolrStream extends TupleStream implements Expressible {
         solrStream.close();
       }
     }
-
-    if ((this.streamContext == null || this.streamContext.getSolrClientCache() == null) &&
-        cloudSolrClient != null) {
-
-      cloudSolrClient.close();
-    }
   }
   
   /** Return the stream sort - ie, the order in which records are returned */
@@ -571,4 +541,8 @@ public class CloudSolrStream extends TupleStream implements Expressible {
       }
     }
   }
+
+  protected ModifiableSolrParams adjustParams(ModifiableSolrParams params) {
+    return params;
+  }
 }
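
The newly protected init() and the adjustParams() hook let subclasses reuse CloudSolrStream's shard fan-out while overriding the per-shard request parameters; constructStreams() now calls adjustParams() before fanning out. A minimal sketch of the pattern (the ShuffleStream added later in this commit is the in-tree example; the class name here is hypothetical):

import java.io.IOException;

import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;

// Hypothetical subclass: route every per-shard request to the /export handler.
public class ExportingStream extends CloudSolrStream {

  public ExportingStream(StreamExpression expression, StreamFactory factory) throws IOException {
    super(expression, factory);
  }

  @Override
  protected ModifiableSolrParams adjustParams(ModifiableSolrParams params) {
    params.set(CommonParams.QT, "/export");  // applied by constructStreams() before fan-out
    return params;
  }
}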

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EchoStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EchoStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EchoStream.java
new file mode 100644
index 0000000..2dd95fe
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EchoStream.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.comp.StreamComparator;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
+import org.apache.solr.client.solrj.io.stream.expr.Expressible;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class EchoStream extends TupleStream implements Expressible {
+
+  private static final long serialVersionUID = 1;
+  private boolean finished;
+  private String echo;
+
+  public EchoStream(String echo) throws IOException {
+    this.echo = stripQuotes(echo);
+  }
+
+  public EchoStream(StreamExpression expression, StreamFactory factory) throws IOException {
+    this.echo = stripQuotes(factory.getValueOperand(expression, 0));
+    this.echo = echo.replace("\\\"", "\"");
+  }
+
+  private String stripQuotes(String s){
+    if(s.startsWith("\"")) {
+      return s.substring(1, s.length()-1);
+    } else {
+      return s;
+    }
+  }
+
+  @Override
+  public StreamExpression toExpression(StreamFactory factory) throws IOException{
+    return toExpression(factory, true);
+  }
+
+  private StreamExpression toExpression(StreamFactory factory, boolean includeStreams) throws IOException {
+    // function name
+    StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
+    expression.addParameter("\""+echo.replace("\"", "\\\"")+"\"");
+    return expression;
+  }
+
+  @Override
+  public Explanation toExplanation(StreamFactory factory) throws IOException {
+
+    return new StreamExplanation(getStreamNodeId().toString())
+        .withFunctionName(factory.getFunctionName(this.getClass()))
+        .withImplementingClass(this.getClass().getName())
+        .withExpressionType(ExpressionType.STREAM_DECORATOR)
+        .withExpression(toExpression(factory, false).toString());
+  }
+
+  public void setStreamContext(StreamContext context) {
+  }
+
+  public List<TupleStream> children() {
+    List<TupleStream> l =  new ArrayList<TupleStream>();
+    return l;
+  }
+
+  public void open() throws IOException {
+
+  }
+
+  public void close() throws IOException {
+  }
+
+  public Tuple read() throws IOException {
+
+    if(finished) {
+      HashMap m = new HashMap();
+      m.put("EOF", true);
+      Tuple tuple = new Tuple(m);
+      return tuple;
+    } else {
+      HashMap m = new HashMap();
+      m.put("echo", echo);
+      Tuple tuple = new Tuple(m);
+      finished = true;
+      return tuple;
+    }
+  }
+
+  /** Return the stream sort - ie, the order in which records are returned */
+  public StreamComparator getStreamSort(){
+    return null;
+  }
+
+  public int getCost() {
+    return 0;
+  }
+
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EvalStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EvalStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EvalStream.java
new file mode 100644
index 0000000..9fac56f
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/EvalStream.java
@@ -0,0 +1,143 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.stream;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.comp.StreamComparator;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
+import org.apache.solr.client.solrj.io.stream.expr.Expressible;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class EvalStream extends TupleStream implements Expressible {
+
+  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private TupleStream stream;
+  private TupleStream evalStream;
+
+  private StreamFactory streamFactory;
+  private StreamContext streamContext;
+
+  public EvalStream(StreamExpression expression, StreamFactory factory) throws IOException {
+    // grab all parameters out
+    List<StreamExpression> streamExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, TupleStream.class);
+
+    if(1 != streamExpressions.size()){
+      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting a single stream but found %d",expression, streamExpressions.size()));
+    }
+
+    TupleStream stream = factory.constructStream(streamExpressions.get(0));
+    init(stream, factory);
+  }
+
+  private void init(TupleStream tupleStream, StreamFactory factory) throws IOException{
+    this.stream = tupleStream;
+    this.streamFactory = factory;
+  }
+
+  @Override
+  public StreamExpression toExpression(StreamFactory factory) throws IOException {
+    return toExpression(factory, true);
+  }
+
+  private StreamExpression toExpression(StreamFactory factory, boolean includeStreams) throws IOException {
+
+    // function name
+    StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
+
+    // stream
+    if(includeStreams) {
+      if (stream instanceof Expressible) {
+        expression.addParameter(((Expressible) stream).toExpression(factory));
+      } else {
+        throw new IOException("The EvalStream contains a non-expressible TupleStream - it cannot be converted to an expression");
+      }
+    }
+
+    return expression;
+  }
+
+  @Override
+  public Explanation toExplanation(StreamFactory factory) throws IOException {
+
+    return new StreamExplanation(getStreamNodeId().toString())
+        .withChildren(new Explanation[]{
+            stream.toExplanation(factory)
+        })
+        .withFunctionName(factory.getFunctionName(this.getClass()))
+        .withImplementingClass(this.getClass().getName())
+        .withExpressionType(ExpressionType.STREAM_DECORATOR)
+        .withExpression(toExpression(factory, false).toString());
+  }
+
+  public void setStreamContext(StreamContext streamContext) {
+    this.streamContext = streamContext;
+    this.stream.setStreamContext(streamContext);
+  }
+
+  public List<TupleStream> children() {
+    List<TupleStream> l =  new ArrayList();
+    l.add(stream);
+    return l;
+  }
+
+  public void open() throws IOException {
+    try {
+      stream.open();
+      Tuple tuple = stream.read();
+      String expr = tuple.getString("expr_s");
+
+      if(expr == null) {
+        throw new IOException("expr_s cannot be empty for the EvalStream");
+      }
+
+      evalStream = streamFactory.constructStream(expr);
+      evalStream.setStreamContext(streamContext);
+      evalStream.open();
+    } finally {
+      stream.close();
+    }
+  }
+
+  public void close() throws IOException {
+    evalStream.close();
+  }
+
+  public Tuple read() throws IOException {
+    return evalStream.read();
+  }
+
+  public StreamComparator getStreamSort(){
+    return stream.getStreamSort();
+  }
+
+  public int getCost() {
+    return 0;
+  }
+}
\ No newline at end of file
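
A hedged usage sketch, assuming "eval" and "search" as the registered function names and that matching documents store a complete streaming expression in their expr_s field: EvalStream reads the first tuple from its inner stream, compiles the expression found in expr_s, and then streams that compiled expression's tuples.

import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
import org.apache.solr.client.solrj.io.stream.EvalStream;
import org.apache.solr.client.solrj.io.stream.StreamContext;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

public class EvalStreamSketch {
  public static void main(String[] args) throws Exception {
    StreamFactory factory = new StreamFactory()
        .withCollectionZkHost("collection1", "localhost:9983")  // hypothetical cluster
        .withFunctionName("search", CloudSolrStream.class)
        .withFunctionName("eval", EvalStream.class);

    // The inner search supplies the first tuple; eval compiles whatever
    // streaming expression that tuple carries in expr_s and streams its results.
    TupleStream stream = factory.constructStream(
        "eval(search(collection1, q=\"*:*\", fl=\"id,expr_s\", sort=\"id asc\"))");

    StreamContext context = new StreamContext();
    context.setSolrClientCache(new SolrClientCache());
    stream.setStreamContext(context);

    stream.open();
    try {
      for (Tuple t = stream.read(); !t.EOF; t = stream.read()) {
        System.out.println(t.fields);
      }
    } finally {
      stream.close();
    }
  }
}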

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
index 06e6fdc..2cd60ec 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
@@ -35,6 +35,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+import org.apache.solr.client.solrj.util.ClientUtils;
 import org.apache.solr.common.params.ModifiableSolrParams;
 
 import static org.apache.solr.common.params.CommonParams.SORT;
@@ -208,9 +209,8 @@ public class FetchStream extends TupleStream implements Expressible {
   }
 
   private void fetchBatch() throws IOException {
-
     Tuple EOFTuple = null;
-    List<Tuple> batch = new ArrayList();
+    List<Tuple> batch = new ArrayList<>(batchSize);
     for(int i=0; i<batchSize; i++) {
       Tuple tuple = stream.read();
       if(tuple.EOF) {
@@ -222,18 +222,12 @@ public class FetchStream extends TupleStream implements Expressible {
     }
 
     if(batch.size() > 0) {
-      StringBuilder buf = new StringBuilder();
-      buf.append(rightKey);
-      buf.append(":(");
-      for (int i = 0; i < batch.size(); i++) {
-        if (i > 0) {
-          buf.append(" ");
-        }
-        Tuple tuple = batch.get(i);
+      StringBuilder buf = new StringBuilder(batch.size() * 10 + 20);
+      buf.append("{! df=").append(rightKey).append(" q.op=OR cache=false }");//disable queryCache
+      for (Tuple tuple : batch) {
         String key = tuple.getString(leftKey);
-        buf.append(key);
+        buf.append(' ').append(ClientUtils.escapeQueryChars(key));
       }
-      buf.append(")");
 
       ModifiableSolrParams params = new ModifiableSolrParams();
       params.add("q", buf.toString());
@@ -245,7 +239,7 @@ public class FetchStream extends TupleStream implements Expressible {
       StreamContext newContext = new StreamContext();
       newContext.setSolrClientCache(streamContext.getSolrClientCache());
       cloudSolrStream.setStreamContext(newContext);
-      Map<String, Tuple> fetched = new HashMap();
+      Map<String, Tuple> fetched = new HashMap<>();
       try {
         cloudSolrStream.open();
         while (true) {
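
To illustrate the rewritten fetchBatch() query construction (all values hypothetical): the batch keys are now joined under a {!...} local-params prefix and escaped, which matters once keys can contain query syntax such as ':' or whitespace.

import org.apache.solr.client.solrj.util.ClientUtils;

public class FetchQuerySketch {
  public static void main(String[] args) {
    String rightKey = "id";                // hypothetical join field
    String[] keys = {"a", "b:c", "d e"};   // hypothetical keys from one batch
    StringBuilder buf = new StringBuilder("{! df=" + rightKey + " q.op=OR cache=false }");
    for (String key : keys) {
      buf.append(' ').append(ClientUtils.escapeQueryChars(key));
    }
    // Prints: {! df=id q.op=OR cache=false } a b\:c d\ e
    System.out.println(buf);
  }
}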

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/HavingStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/HavingStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/HavingStream.java
index 35e8952..2f74bc5 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/HavingStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/HavingStream.java
@@ -43,6 +43,7 @@ public class HavingStream extends TupleStream implements Expressible {
 
   private TupleStream stream;
   private BooleanEvaluator evaluator;
+  private StreamContext streamContext;
 
   private transient Tuple currentGroupHead;
 
@@ -128,6 +129,7 @@ public class HavingStream extends TupleStream implements Expressible {
   }
 
   public void setStreamContext(StreamContext context) {
+    this.streamContext = context;
     this.stream.setStreamContext(context);
   }
 
@@ -152,6 +154,7 @@ public class HavingStream extends TupleStream implements Expressible {
         return tuple;
       }
 
+      streamContext.getTupleContext().clear();
       if(evaluator.evaluate(tuple)){
         return tuple;
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java
index 87e1354..58ba248 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java
@@ -18,14 +18,10 @@ package org.apache.solr.client.solrj.io.stream;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import java.util.Random;
-import java.util.Set;
 
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.comp.FieldComparator;
@@ -38,11 +34,6 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.ZkCoreNodeProps;
-import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.ModifiableSolrParams;
 
 import static org.apache.solr.common.params.CommonParams.DISTRIB;
@@ -263,27 +254,7 @@ public class ParallelStream extends CloudSolrStream implements Expressible {
     try {
       Object pushStream = ((Expressible) tupleStream).toExpression(streamFactory);
 
-      ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader();
-
-      Collection<Slice> slices = CloudSolrStream.getSlices(this.collection, zkStateReader, true);
-
-      ClusterState clusterState = zkStateReader.getClusterState();
-      Set<String> liveNodes = clusterState.getLiveNodes();
-
-      List<Replica> shuffler = new ArrayList<>();
-      for(Slice slice : slices) {
-        Collection<Replica> replicas = slice.getReplicas();
-        for (Replica replica : replicas) {
-          if(replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName()))
-          shuffler.add(replica);
-        }
-      }
-
-      if(workers > shuffler.size()) {
-        throw new IOException("Number of workers exceeds nodes in the worker collection");
-      }
-
-      Collections.shuffle(shuffler, new Random());
+      List<String> shardUrls = getShards(this.zkHost, this.collection, this.streamContext);
 
       for(int w=0; w<workers; w++) {
         ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
@@ -293,9 +264,8 @@ public class ParallelStream extends CloudSolrStream implements Expressible {
 
         paramsLoc.set("expr", pushStream.toString());
         paramsLoc.set("qt","/stream");
-        Replica rep = shuffler.get(w);
-        ZkCoreNodeProps zkProps = new ZkCoreNodeProps(rep);
-        String url = zkProps.getCoreUrl();
+
+        String url = shardUrls.get(w);
         SolrStream solrStream = new SolrStream(url, paramsLoc);
         solrStreams.add(solrStream);
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SelectStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SelectStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SelectStream.java
index c0cbc17..36433e3 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SelectStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SelectStream.java
@@ -26,6 +26,7 @@ import java.util.Set;
 
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.comp.StreamComparator;
+import org.apache.solr.client.solrj.io.eval.EvaluatorException;
 import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
 import org.apache.solr.client.solrj.io.ops.StreamOperation;
 import org.apache.solr.client.solrj.io.stream.expr.Explanation;
@@ -49,6 +50,7 @@ public class SelectStream extends TupleStream implements Expressible {
   private static final long serialVersionUID = 1;
 
   private TupleStream stream;
+  private StreamContext streamContext;
   private Map<String,String> selectedFields;
   private Map<StreamEvaluator,String> selectedEvaluators;
   private List<StreamOperation> operations;
@@ -124,8 +126,17 @@ public class SelectStream extends TupleStream implements Expressible {
               selectedEvaluators.put(factory.constructEvaluator(asValueExpression), asName);
               handled = true;
             }
-          }
-          catch(Throwable e){
+          } catch(Throwable e) {
+            Throwable t = e;
+            while(true) {
+              if(t instanceof EvaluatorException) {
+                throw new IOException(t);
+              }
+              t = t.getCause();
+              if(t == null) {
+                break;
+              }
+            }
             // it was not handled, so treat as a non-evaluator
           }
         }
@@ -213,6 +224,7 @@ public class SelectStream extends TupleStream implements Expressible {
   }
 
   public void setStreamContext(StreamContext context) {
+    this.streamContext = context;
     this.stream.setStreamContext(context);
     Set<StreamEvaluator> evaluators = selectedEvaluators.keySet();
 
@@ -245,6 +257,14 @@ public class SelectStream extends TupleStream implements Expressible {
     // create a copy with the limited set of fields
     Tuple workingToReturn = new Tuple(new HashMap<>());
     Tuple workingForEvaluators = new Tuple(new HashMap<>());
+
+    //Clear the TupleContext before running the evaluators.
+    //The TupleContext allows evaluators to cache values within the scope of a single tuple.
+    //For example a LocalDateTime could be parsed by one evaluator and used by other evaluators within the scope of the tuple.
+    //This avoids the need to create multiple LocalDateTime instances for the same tuple to satisfy a select expression.
+
+    streamContext.getTupleContext().clear();
+
     for(Object fieldName : original.fields.keySet()){
       workingForEvaluators.put(fieldName, original.get(fieldName));
       if(selectedFields.containsKey(fieldName)){

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ShuffleStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ShuffleStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ShuffleStream.java
new file mode 100644
index 0000000..d30918b
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ShuffleStream.java
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream;
+
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+
+import org.apache.solr.client.solrj.io.stream.expr.Expressible;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+
+
+public class ShuffleStream extends CloudSolrStream implements Expressible {
+
+  public ShuffleStream(StreamExpression expression, StreamFactory factory) throws IOException {
+    // grab all parameters out
+    String collectionName = factory.getValueOperand(expression, 0);
+    List<StreamExpressionNamedParameter> namedParams = factory.getNamedOperands(expression);
+    StreamExpressionNamedParameter aliasExpression = factory.getNamedOperand(expression, "aliases");
+    StreamExpressionNamedParameter zkHostExpression = factory.getNamedOperand(expression, "zkHost");
+
+    // Collection Name
+    if(null == collectionName){
+      throw new IOException(String.format(Locale.ROOT,"invalid expression %s - collectionName expected as first operand",expression));
+    }
+
+    // Validate there are no unknown parameters - zkHost and alias are namedParameter so we don't need to count it twice
+    if(expression.getParameters().size() != 1 + namedParams.size()){
+      throw new IOException(String.format(Locale.ROOT,"invalid expression %s - unknown operands found",expression));
+    }
+
+    // Named parameters - passed directly to solr as solrparams
+    if(0 == namedParams.size()){
+      throw new IOException(String.format(Locale.ROOT,"invalid expression %s - at least one named parameter expected. eg. 'q=*:*'",expression));
+    }
+
+    ModifiableSolrParams mParams = new ModifiableSolrParams();
+    for(StreamExpressionNamedParameter namedParam : namedParams){
+      if(!namedParam.getName().equals("zkHost") && !namedParam.getName().equals("aliases")){
+        mParams.add(namedParam.getName(), namedParam.getParameter().toString().trim());
+      }
+    }
+
+    // Aliases, optional, if provided then need to split
+    if(null != aliasExpression && aliasExpression.getParameter() instanceof StreamExpressionValue){
+      fieldMappings = new HashMap<>();
+      for(String mapping : ((StreamExpressionValue)aliasExpression.getParameter()).getValue().split(",")){
+        String[] parts = mapping.trim().split("=");
+        if(2 == parts.length){
+          fieldMappings.put(parts[0], parts[1]);
+        }
+        else{
+          throw new IOException(String.format(Locale.ROOT,"invalid expression %s - alias expected of the format origName=newName",expression));
+        }
+      }
+    }
+
+    // zkHost, optional - if not provided then will look into factory list to get
+    String zkHost = null;
+    if(null == zkHostExpression){
+      zkHost = factory.getCollectionZkHost(collectionName);
+      if(zkHost == null) {
+        zkHost = factory.getDefaultZkHost();
+      }
+    }
+    else if(zkHostExpression.getParameter() instanceof StreamExpressionValue){
+      zkHost = ((StreamExpressionValue)zkHostExpression.getParameter()).getValue();
+    }
+    if(null == zkHost){
+      throw new IOException(String.format(Locale.ROOT,"invalid expression %s - zkHost not found for collection '%s'",expression,collectionName));
+    }
+
+    // We've got all the required items
+    init(collectionName, zkHost, mParams);
+  }
+
+  public ModifiableSolrParams adjustParams(ModifiableSolrParams mParams) {
+    mParams.set(CommonParams.QT, "/export");
+    return mParams;
+  }
+
+}
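
A hedged usage sketch, assuming the stream is registered under the function name "shuffle":

StreamFactory factory = new StreamFactory()
    .withCollectionZkHost("collection1", "localhost:9983")  // hypothetical cluster
    .withFunctionName("shuffle", ShuffleStream.class);

// Same surface as a search(...) expression, but adjustParams() above pins
// qt=/export, so every per-shard request is served by the export handler.
TupleStream stream = factory.constructStream(
    "shuffle(collection1, q=\"*:*\", fl=\"id\", sort=\"id asc\")");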

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java
index 6cbf090..60a9274 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java
@@ -36,6 +36,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 public class StreamContext implements Serializable{
 
   private Map entries = new HashMap();
+  private Map tupleContext = new HashMap();
   public int workerID;
   public int numWorkers;
   private SolrClientCache clientCache;
@@ -50,6 +51,10 @@ public class StreamContext implements Serializable{
     this.entries.put(key, value);
   }
 
+  public boolean containsKey(Object key) {
+    return entries.containsKey(key);
+  }
+
   public Map getEntries() {
     return this.entries;
   }
@@ -74,6 +79,10 @@ public class StreamContext implements Serializable{
     this.streamFactory = streamFactory;
   }
 
+  public Map getTupleContext() {
+    return tupleContext;
+  }
+
   public StreamFactory getStreamFactory() {
     return this.streamFactory;
   }
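
The tupleContext map gives evaluators a per-tuple scratch space. A small hedged sketch of the intended use (key and value are hypothetical):

StreamContext context = new StreamContext();

// An evaluator can stash an expensive per-tuple parse (e.g. a LocalDateTime)
// for other evaluators working on the same tuple...
context.getTupleContext().put("dt", java.time.LocalDateTime.now());

// ...and the enclosing stream clears the scratch space between tuples,
// as the SelectStream and HavingStream changes above do.
context.getTupleContext().clear();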

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
index 8beb6ed..ec43e11 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
@@ -613,6 +613,44 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  public static class MoveReplica extends AsyncCollectionAdminRequest {
+    String collection, replica, targetNode;
+    String shard, fromNode;
+    boolean randomlyMoveReplica;
+
+    public MoveReplica(String collection, String replica, String targetNode) {
+      super(CollectionAction.MOVEREPLICA);
+      this.collection = collection;
+      this.replica = replica;
+      this.targetNode = targetNode;
+      this.randomlyMoveReplica = false;
+    }
+
+    public MoveReplica(String collection, String shard, String fromNode, String targetNode) {
+      super(CollectionAction.MOVEREPLICA);
+      this.collection = collection;
+      this.shard = shard;
+      this.fromNode = fromNode;
+      this.targetNode = targetNode;
+      this.randomlyMoveReplica = true;
+    }
+
+    @Override
+    public SolrParams getParams() {
+      ModifiableSolrParams params = (ModifiableSolrParams) super.getParams();
+      params.set("collection", collection);
+      params.set("targetNode", targetNode);
+      if (randomlyMoveReplica) {
+        params.set("shard", shard);
+        params.set("fromNode", fromNode);
+      } else {
+        params.set("replica", replica);
+      }
+      return params;
+    }
+  }
+
+
   /*
    * Returns a RebalanceLeaders object to rebalance leaders for a collection
    */
@@ -2208,6 +2246,20 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
 
   }
 
+  // LISTALIASES request
+  public static class ListAliases extends CollectionAdminRequest<CollectionAdminResponse> {
+
+    public ListAliases() {
+      super(CollectionAction.LISTALIASES);
+    }
+
+    @Override
+    protected CollectionAdminResponse createResponse(SolrClient client) {
+      return new CollectionAdminResponse();
+    }
+
+  }
+
   /**
    * Returns a SolrRequest to get a list of collections in the cluster
    */
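
A hedged SolrJ usage sketch for the two requests added above (ZK address, collection, shard, replica, and node names are all hypothetical):

import java.util.Map;

import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;

public class MoveReplicaSketch {
  public static void main(String[] args) throws Exception {
    try (CloudSolrClient client = new CloudSolrClient.Builder()
        .withZkHost("localhost:9983").build()) {

      // Move one named replica to a target node:
      new CollectionAdminRequest.MoveReplica(
          "collection1", "core_node3", "127.0.1.1:8984_solr").process(client);

      // Or have a randomly chosen replica of shard1 moved off fromNode:
      new CollectionAdminRequest.MoveReplica(
          "collection1", "shard1", "127.0.1.1:8983_solr", "127.0.1.1:8984_solr")
          .process(client);

      // LISTALIASES, surfaced through the new getAliases() helper (added below):
      Map<String, String> aliases =
          new CollectionAdminRequest.ListAliases().process(client).getAliases();
      System.out.println(aliases);
    }
  }
}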

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
index 142710a..0d9867c 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
@@ -269,7 +269,7 @@ public class UpdateRequest extends AbstractUpdateRequest {
           return null;
         }
         String leaderUrl = urls.get(0);
-        LBHttpSolrClient.Req request = (LBHttpSolrClient.Req) routes
+        LBHttpSolrClient.Req request = routes
             .get(leaderUrl);
         if (request == null) {
           UpdateRequest updateRequest = new UpdateRequest();
@@ -278,6 +278,7 @@ public class UpdateRequest extends AbstractUpdateRequest {
           updateRequest.setParams(params);
           updateRequest.setPath(getPath());
           updateRequest.setBasicAuthCredentials(getBasicAuthUser(), getBasicAuthPassword());
+          updateRequest.setResponseParser(getResponseParser());
           request = new LBHttpSolrClient.Req(updateRequest, urls);
           routes.put(leaderUrl, request);
         }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/response/CollectionAdminResponse.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/CollectionAdminResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/CollectionAdminResponse.java
index 82d4d6f..6821075 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/CollectionAdminResponse.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/CollectionAdminResponse.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.client.solrj.response;
 
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -61,6 +62,16 @@ public class CollectionAdminResponse extends SolrResponseBase
   }
 
   @SuppressWarnings("unchecked")
+  public Map<String, String> getAliases()
+  {
+    NamedList<Object> response = getResponse();
+    if (response.get("aliases") != null) {
+      return ((Map<String, String>)response.get("aliases"));
+    }
+    return Collections.emptyMap();
+  }
+
+  @SuppressWarnings("unchecked")
   public Map<String, NamedList<Integer>> getCollectionNodesStatus()
   {
     Map<String, NamedList<Integer>> res = new HashMap<>();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
index 302ee62..65bd81b 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
@@ -322,6 +322,10 @@ public class ClusterState implements JSONWriter.Writable {
       return new ClusterState(version, liveNodes, Collections.<String, DocCollection>emptyMap());
     }
     Map<String, Object> stateMap = (Map<String, Object>) Utils.fromJSON(bytes);
+    return load(version, stateMap, liveNodes, znode);
+  }
+
+  public static ClusterState load(Integer version, Map<String, Object> stateMap, Set<String> liveNodes, String znode) {
     Map<String,CollectionRef> collections = new LinkedHashMap<>(stateMap.size());
     for (Entry<String, Object> entry : stateMap.entrySet()) {
       String collectionName = entry.getKey();
@@ -332,7 +336,6 @@ public class ClusterState implements JSONWriter.Writable {
     return new ClusterState( liveNodes, collections,version);
   }
 
-
   public static Aliases load(byte[] bytes) {
     if (bytes == null || bytes.length == 0) {
       return new Aliases();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
index dc9efbe..40cc8ee 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
@@ -68,6 +68,7 @@ public interface CollectionParams {
     SYNCSHARD(true, LockLevel.SHARD),
     CREATEALIAS(true, LockLevel.COLLECTION),
     DELETEALIAS(true, LockLevel.COLLECTION),
+    LISTALIASES(false, LockLevel.NONE),
     SPLITSHARD(true, LockLevel.SHARD),
     DELETESHARD(true, LockLevel.SHARD),
     CREATESHARD(true, LockLevel.COLLECTION),

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/test/org/apache/solr/client/solrj/MergeIndexesExampleTestBase.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/MergeIndexesExampleTestBase.java b/solr/solrj/src/test/org/apache/solr/client/solrj/MergeIndexesExampleTestBase.java
index 1f16415..2c8ebe9 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/MergeIndexesExampleTestBase.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/MergeIndexesExampleTestBase.java
@@ -79,7 +79,7 @@ public abstract class MergeIndexesExampleTestBase extends SolrExampleTestBase {
     System.setProperty( "solr.core1.data.dir", this.dataDir2.getCanonicalPath() );
 
     setupCoreContainer();
-    log.info("CORES=" + cores + " : " + cores.getCoreNames());
+    log.info("CORES=" + cores + " : " + cores.getLoadedCoreNames());
 
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientCacheTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientCacheTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientCacheTest.java
index 6adbaae..112a46d 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientCacheTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientCacheTest.java
@@ -118,9 +118,9 @@ public class CloudSolrClientCacheTest extends SolrTestCaseJ4 {
     return mockLbclient;
   }
 
-  private CloudSolrClient.ClusterStateProvider getStateProvider(Set<String> livenodes,
-                                                                Map<String, CollectionRef> colls) {
-    return new CloudSolrClient.ClusterStateProvider() {
+  private ClusterStateProvider getStateProvider(Set<String> livenodes,
+                                                                Map<String, ClusterState.CollectionRef> colls) {
+    return new ClusterStateProvider() {
       @Override
       public CollectionRef getState(String collection) {
         return colls.get(collection);
@@ -132,11 +132,6 @@ public class CloudSolrClientCacheTest extends SolrTestCaseJ4 {
       }
 
       @Override
-      public Map<String, Object> getClusterProperties() {
-        return Collections.EMPTY_MAP;
-      }
-
-      @Override
       public String getAlias(String collection) {
         return collection;
       }
@@ -153,6 +148,16 @@ public class CloudSolrClientCacheTest extends SolrTestCaseJ4 {
       public void close() throws IOException {
 
       }
+
+      @Override
+      public Object getClusterProperty(String propertyName) {
+        return null;
+      }
+
+      @Override
+      public Object getClusterProperty(String propertyName, String def) {
+        return def;
+      }
     };
 
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
index d22b37c..c91cb67 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
@@ -68,6 +68,7 @@ import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.handler.admin.CollectionsHandler;
 import org.apache.solr.handler.admin.ConfigSetsHandler;
 import org.apache.solr.handler.admin.CoreAdminHandler;
+import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
@@ -90,6 +91,8 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
   private static final int TIMEOUT = 30;
   private static final int NODE_COUNT = 3;
 
+  private static CloudSolrClient httpBasedCloudSolrClient = null;
+
   @BeforeClass
   public static void setupCluster() throws Exception {
     configureCluster(NODE_COUNT)
@@ -99,8 +102,21 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
     CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 1).process(cluster.getSolrClient());
     AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(),
         false, true, TIMEOUT);
+    
+    httpBasedCloudSolrClient = new CloudSolrClient.Builder().withSolrUrl(
+        cluster.getJettySolrRunner(0).getBaseUrl().toString()).build();
   }
 
+  @AfterClass
+  public static void afterClass() {
+    if (httpBasedCloudSolrClient != null) {
+      try {
+        httpBasedCloudSolrClient.close();
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Before
@@ -110,6 +126,13 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
         .commit(cluster.getSolrClient(), COLLECTION);
   }
 
+  /**
+   * Randomly return the cluster's ZK based CSC, or HttpClusterProvider based CSC.
+   */
+  private CloudSolrClient getRandomClient() {
+    return random().nextBoolean()? cluster.getSolrClient(): httpBasedCloudSolrClient;
+  }
+
   @Test
   public void testParallelUpdateQTime() throws Exception {
     UpdateRequest req = new UpdateRequest();
@@ -118,7 +141,7 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
       doc.addField("id", String.valueOf(TestUtil.nextInt(random(), 1000, 1100)));
       req.add(doc);
     }
-    UpdateResponse response = req.process(cluster.getSolrClient(), COLLECTION);
+    UpdateResponse response = req.process(getRandomClient(), COLLECTION);
     // See SOLR-6547, we just need to ensure that no exception is thrown here
     assertTrue(response.getQTime() >= 0);
   }
@@ -143,33 +166,48 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
         .add(new SolrInputDocument(id, "1", "a_t", "hello2"), false)
         .commit(cluster.getSolrClient(), "overwrite");
       
-    resp = cluster.getSolrClient().query("overwrite", new SolrQuery("*:*"));
+    resp = getRandomClient().query("overwrite", new SolrQuery("*:*"));
     assertEquals("There should be 3 documents because there should be two id=1 docs due to overwrite=false", 3, resp.getResults().getNumFound());
 
   }
 
   @Test
+  public void testAliasHandling() throws Exception {
+    CloudSolrClient client = getRandomClient();
+    SolrInputDocument doc = new SolrInputDocument("id", "1", "title_s", "my doc");
+    client.add(COLLECTION, doc);
+    client.commit(COLLECTION);
+
+    CollectionAdminRequest.createAlias("testalias", COLLECTION).process(cluster.getSolrClient());
+
+    // ensure that the alias has been registered
+    assertEquals(COLLECTION,
+        new CollectionAdminRequest.ListAliases().process(cluster.getSolrClient()).getAliases().get("testalias"));
+
+    assertEquals(1, client.query(COLLECTION, params("q", "*:*")).getResults().getNumFound());
+    assertEquals(1, client.query("testalias", params("q", "*:*")).getResults().getNumFound());
+  }
+
+  @Test
   public void testHandlingOfStaleAlias() throws Exception {
-    try (CloudSolrClient client = getCloudSolrClient(cluster.getZkServer().getZkAddress())) {
-      client.setDefaultCollection("misconfigured-alias");
+    CloudSolrClient client = getRandomClient();
 
-      CollectionAdminRequest.createCollection("nemesis", "conf", 2, 1).process(client);
-      CollectionAdminRequest.createAlias("misconfigured-alias", "nemesis").process(client);
-      CollectionAdminRequest.deleteCollection("nemesis").process(client);
+    CollectionAdminRequest.createCollection("nemesis", "conf", 2, 1).process(client);
+    CollectionAdminRequest.createAlias("misconfigured-alias", "nemesis").process(client);
+    CollectionAdminRequest.deleteCollection("nemesis").process(client);
 
-      List<SolrInputDocument> docs = new ArrayList<>();
+    List<SolrInputDocument> docs = new ArrayList<>();
 
-      SolrInputDocument doc = new SolrInputDocument();
-      doc.addField(id, Integer.toString(1));
-      docs.add(doc);
+    SolrInputDocument doc = new SolrInputDocument();
+    doc.addField(id, Integer.toString(1));
+    docs.add(doc);
 
-      try {
-        client.add(docs);
-        fail("Alias points to non-existing collection, add should fail");
-      } catch (SolrException e) {
-        assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code());
-        assertTrue("Unexpected exception", e.getMessage().contains("Collection not found"));
-      }
+    try {
+      client.add("misconfigured-alias", docs);
+      fail("Alias points to non-existing collection, add should fail");
+    } catch (SolrException e) {
+      assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code());
+      assertTrue("Unexpected exception", e.getMessage().contains("Collection not found"));
     }
   }
 
@@ -182,8 +220,8 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
         .setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
     
     // Test single threaded routed updates for UpdateRequest
-    NamedList<Object> response = cluster.getSolrClient().request(request, COLLECTION);
-    if (cluster.getSolrClient().isDirectUpdatesToLeadersOnly()) {
+    NamedList<Object> response = getRandomClient().request(request, COLLECTION);
+    if (getRandomClient().isDirectUpdatesToLeadersOnly()) {
       checkSingleServer(response);
     }
     CloudSolrClient.RouteResponse rr = (CloudSolrClient.RouteResponse) response;
@@ -214,11 +252,11 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
         .deleteById("0")
         .deleteById("2")
         .commit(cluster.getSolrClient(), COLLECTION);
-    if (cluster.getSolrClient().isDirectUpdatesToLeadersOnly()) {
+    if (getRandomClient().isDirectUpdatesToLeadersOnly()) {
       checkSingleServer(uResponse.getResponse());
     }
 
-    QueryResponse qResponse = cluster.getSolrClient().query(COLLECTION, new SolrQuery("*:*"));
+    QueryResponse qResponse = getRandomClient().query(COLLECTION, new SolrQuery("*:*"));
     SolrDocumentList docs = qResponse.getResults();
     assertEquals(0, docs.getNumFound());
     
@@ -307,7 +345,7 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
       ModifiableSolrParams solrParams = new ModifiableSolrParams();
       solrParams.set(CommonParams.Q, "*:*");
       solrParams.set(ShardParams._ROUTE_, sameShardRoutes.get(random().nextInt(sameShardRoutes.size())));
-      log.info("output: {}", cluster.getSolrClient().query(COLLECTION, solrParams));
+      log.info("output: {}", getRandomClient().query(COLLECTION, solrParams));
     }
 
     // Request counts increase from expected nodes should aggregate to 1000, while there should be
@@ -362,10 +400,10 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
         .add(id, "0", "a_t", "hello1")
         .add(id, "2", "a_t", "hello2")
         .add(id, "3", "a_t", "hello2")
-        .commit(cluster.getSolrClient(), collectionName);
+        .commit(getRandomClient(), collectionName);
 
     // Run the actual test for 'preferLocalShards'
-    queryWithPreferLocalShards(cluster.getSolrClient(), true, collectionName);
+    queryWithPreferLocalShards(getRandomClient(), true, collectionName);
   }
 
   private void queryWithPreferLocalShards(CloudSolrClient cloudClient,
@@ -418,10 +456,10 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
 
   private Long getNumRequests(String baseUrl, String collectionName) throws
       SolrServerException, IOException {
-    return getNumRequests(baseUrl, collectionName, "QUERY", "standard", false);
+    return getNumRequests(baseUrl, collectionName, "QUERY", "standard", null, false);
   }
 
-  private Long getNumRequests(String baseUrl, String collectionName, String category, String key, boolean returnNumErrors) throws
+  private Long getNumRequests(String baseUrl, String collectionName, String category, String key, String scope, boolean returnNumErrors) throws
       SolrServerException, IOException {
 
     NamedList<Object> resp;
@@ -437,7 +475,21 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
       QueryRequest req = new QueryRequest(params);
       resp = client.request(req);
     }
-    return (Long) resp.findRecursive("solr-mbeans", category, key, "stats", returnNumErrors ? "errors" : "requests");
+    String name;
+    if (returnNumErrors) {
+      name = category + "." + (scope != null ? scope : key) + ".errors";
+    } else {
+      name = category + "." + (scope != null ? scope : key) + ".requests";
+    }
+    Map<String,Object> map = (Map<String,Object>)resp.findRecursive("solr-mbeans", category, key, "stats");
+    if (map == null) {
+      return null;
+    }
+    if (scope != null) { // admin handler uses a meter instead of counter here
+      return (Long)map.get(name + ".count");
+    } else {
+      return (Long) map.get(name);
+    }
   }
 
   @Test
@@ -458,7 +510,7 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
         for (String adminPath : adminPathToMbean.keySet()) {
           long errorsBefore = 0;
           for (JettySolrRunner runner : cluster.getJettySolrRunners()) {
-            Long numRequests = getNumRequests(runner.getBaseUrl().toString(), "foo", "ADMIN", adminPathToMbean.get(adminPath), true);
+            Long numRequests = getNumRequests(runner.getBaseUrl().toString(), "foo", "ADMIN", adminPathToMbean.get(adminPath), adminPath, true);
             errorsBefore += numRequests;
             log.info("Found {} requests to {} on {}", numRequests, adminPath, runner.getBaseUrl());
           }
@@ -475,7 +527,7 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
           }
           long errorsAfter = 0;
           for (JettySolrRunner runner : cluster.getJettySolrRunners()) {
-            Long numRequests = getNumRequests(runner.getBaseUrl().toString(), "foo", "ADMIN", adminPathToMbean.get(adminPath), true);
+            Long numRequests = getNumRequests(runner.getBaseUrl().toString(), "foo", "ADMIN", adminPathToMbean.get(adminPath), adminPath, true);
             errorsAfter += numRequests;
             log.info("Found {} requests to {} on {}", numRequests, adminPath, runner.getBaseUrl());
           }
@@ -644,7 +696,7 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
         .add("id", "2", "a_t", "hello2");
     updateRequest.setParam(UpdateParams.VERSIONS, Boolean.TRUE.toString());
 
-    NamedList<Object> response = updateRequest.commit(cluster.getSolrClient(), COLLECTION).getResponse();
+    NamedList<Object> response = updateRequest.commit(getRandomClient(), COLLECTION).getResponse();
     Object addsObject = response.get("adds");
     
     assertNotNull("There must be a adds parameter", addsObject);
@@ -663,7 +715,7 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
     assertTrue("Version for id 2 must be a long", object instanceof Long);
     versions.put("2", (Long) object);
 
-    QueryResponse resp = cluster.getSolrClient().query(COLLECTION, new SolrQuery("*:*"));
+    QueryResponse resp = getRandomClient().query(COLLECTION, new SolrQuery("*:*"));
     assertEquals("There should be one document because overwrite=true", 2, resp.getResults().getNumFound());
 
     for (SolrDocument doc : resp.getResults()) {
@@ -674,13 +726,38 @@ public class CloudSolrClientTest extends SolrCloudTestCase {
     // assert that "deletes" are returned
     UpdateRequest deleteRequest = new UpdateRequest().deleteById("1");
     deleteRequest.setParam(UpdateParams.VERSIONS, Boolean.TRUE.toString());
-    response = deleteRequest.commit(cluster.getSolrClient(), COLLECTION).getResponse();
+    response = deleteRequest.commit(getRandomClient(), COLLECTION).getResponse();
     Object deletesObject = response.get("deletes");
     assertNotNull("There must be a deletes parameter", deletesObject);
     NamedList deletes = (NamedList) deletesObject;
     assertEquals("There must be 1 version", 1, deletes.size());
   }
   
+  @Test
+  public void testInitializationWithSolrUrls() throws Exception {
+    CloudSolrClient client = httpBasedCloudSolrClient;
+    SolrInputDocument doc = new SolrInputDocument("id", "1", "title_s", "my doc");
+    client.add(COLLECTION, doc);
+    client.commit(COLLECTION);
+    assertEquals(1, client.query(COLLECTION, params("q", "*:*")).getResults().getNumFound());
+  }
+
+  @Test
+  public void testCollectionDoesntExist() throws Exception {
+    CloudSolrClient client = getRandomClient();
+    SolrInputDocument doc = new SolrInputDocument("id", "1", "title_s", "my doc");
+    try {
+      client.add("boguscollectionname", doc);
+      fail();
+    } catch (SolrException ex) {
+      if (ex.getMessage().equals("Collection not found: boguscollectionname")) {
+        // pass
+      } else {
+        throw ex;
+      }
+    }
+  }
+
   private static void checkSingleServer(NamedList<Object> response) {
     final CloudSolrClient.RouteResponse rr = (CloudSolrClient.RouteResponse) response;
     final Map<String,LBHttpSolrClient.Req> routes = rr.getRoutes();

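For context, the HTTP-based construction path exercised by these tests boils
down to the following sketch; the base URL and collection name are
illustrative placeholders, not values from this patch:

    // Build a CloudSolrClient from a Solr base URL instead of a ZK address,
    // then index through it and close it when done.
    CloudSolrClient client = new CloudSolrClient.Builder()
        .withSolrUrl("http://localhost:8983/solr")
        .build();
    try {
      client.add("mycollection", new SolrInputDocument("id", "1"));
      client.commit("mycollection");
    } finally {
      client.close();
    }
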
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java
index e55c837..9fff33a 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java
@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.Locale;
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.client.solrj.io.SolrClientCache;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.comp.ComparatorOrder;
 import org.apache.solr.client.solrj.io.comp.FieldComparator;
@@ -205,6 +206,10 @@ public class JDBCStreamTest extends SolrCloudTestCase {
       statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')");
       statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('AL', 'Algeria')");
     }
+
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
     
     // Load Solr
     new UpdateRequest()
@@ -217,18 +222,25 @@ public class JDBCStreamTest extends SolrCloudTestCase {
       .withFunctionName("search", CloudSolrStream.class);
     
     List<Tuple> tuples;
-    
-    // Simple 1
-    TupleStream jdbcStream = new JDBCStream("jdbc:hsqldb:mem:.", "select CODE,COUNTRY_NAME from COUNTRIES order by CODE", new FieldComparator("CODE", ComparatorOrder.ASCENDING));
-    TupleStream selectStream = new SelectStream(jdbcStream, new HashMap<String, String>(){{ put("CODE", "code_s"); put("COUNTRY_NAME", "name_s"); }});
-    TupleStream searchStream = factory.constructStream("search(" + COLLECTIONORALIAS + ", fl=\"code_s,name_s\",q=\"*:*\",sort=\"code_s asc\")");
-    TupleStream mergeStream = new MergeStream(new FieldComparator("code_s", ComparatorOrder.ASCENDING), new TupleStream[]{selectStream,searchStream});
-    
-    tuples = getTuples(mergeStream);
-    
-    assertEquals(7, tuples.size());
-    assertOrderOf(tuples, "code_s", "AL","CA","GB","NL","NO","NP","US");
-    assertOrderOf(tuples, "name_s", "Algeria", "Canada", "Great Britian", "Netherlands", "Norway", "Nepal", "United States");
+
+    try {
+      // Simple 1
+      TupleStream jdbcStream = new JDBCStream("jdbc:hsqldb:mem:.", "select CODE,COUNTRY_NAME from COUNTRIES order by CODE", new FieldComparator("CODE", ComparatorOrder.ASCENDING));
+      TupleStream selectStream = new SelectStream(jdbcStream, new HashMap<String, String>() {{
+        put("CODE", "code_s");
+        put("COUNTRY_NAME", "name_s");
+      }});
+      TupleStream searchStream = factory.constructStream("search(" + COLLECTIONORALIAS + ", fl=\"code_s,name_s\",q=\"*:*\",sort=\"code_s asc\")");
+      TupleStream mergeStream = new MergeStream(new FieldComparator("code_s", ComparatorOrder.ASCENDING), new TupleStream[]{selectStream, searchStream});
+      mergeStream.setStreamContext(streamContext);
+      tuples = getTuples(mergeStream);
+
+      assertEquals(7, tuples.size());
+      assertOrderOf(tuples, "code_s", "AL", "CA", "GB", "NL", "NO", "NP", "US");
+      assertOrderOf(tuples, "name_s", "Algeria", "Canada", "Great Britian", "Netherlands", "Norway", "Nepal", "United States");
+    } finally {
+      solrClientCache.close();
+    }
   }
 
   @Test
@@ -277,32 +289,41 @@ public class JDBCStreamTest extends SolrCloudTestCase {
     String expression;
     TupleStream stream;
     List<Tuple> tuples;
-    
-    // Basic test
-    expression =   
-              "innerJoin("
-            + "  select("
-            + "    search(" + COLLECTIONORALIAS + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
-            + "    personId_i as personId,"
-            + "    rating_f as rating"
-            + "  ),"
-            + "  select("
-            + "    jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", sort=\"ID asc\"),"
-            + "    ID as personId,"
-            + "    NAME as personName,"
-            + "    COUNTRY_NAME as country"
-            + "  ),"
-            + "  on=\"personId\""
-            + ")";
-
-    stream = factory.constructStream(expression);
-    tuples = getTuples(stream);
-    
-    assertEquals(10, tuples.size());
-    assertOrderOf(tuples, "personId", 11,12,13,14,15,16,17,18,19,20);
-    assertOrderOf(tuples, "rating", 3.5d,5d,2.2d,4.3d,3.5d,3d,3d,4d,4.1d,4.8d);
-    assertOrderOf(tuples, "personName", "Emma","Grace","Hailey","Isabella","Lily","Madison","Mia","Natalie","Olivia","Samantha");
-    assertOrderOf(tuples, "country", "Netherlands","United States","Netherlands","Netherlands","Netherlands","United States","United States","Netherlands","Netherlands","United States");
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
+    try {
+      // Basic test
+      expression =
+          "innerJoin("
+              + "  select("
+              + "    search(" + COLLECTIONORALIAS + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
+              + "    personId_i as personId,"
+              + "    rating_f as rating"
+              + "  ),"
+              + "  select("
+              + "    jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", sort=\"ID asc\"),"
+              + "    ID as personId,"
+              + "    NAME as personName,"
+              + "    COUNTRY_NAME as country"
+              + "  ),"
+              + "  on=\"personId\""
+              + ")";
+
+
+      stream = factory.constructStream(expression);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assertEquals(10, tuples.size());
+      assertOrderOf(tuples, "personId", 11, 12, 13, 14, 15, 16, 17, 18, 19, 20);
+      assertOrderOf(tuples, "rating", 3.5d, 5d, 2.2d, 4.3d, 3.5d, 3d, 3d, 4d, 4.1d, 4.8d);
+      assertOrderOf(tuples, "personName", "Emma", "Grace", "Hailey", "Isabella", "Lily", "Madison", "Mia", "Natalie", "Olivia", "Samantha");
+      assertOrderOf(tuples, "country", "Netherlands", "United States", "Netherlands", "Netherlands", "Netherlands", "United States", "United States", "Netherlands", "Netherlands", "United States");
+    } finally {
+      solrClientCache.close();
+    }
   }
 
   @Test
@@ -351,58 +372,67 @@ public class JDBCStreamTest extends SolrCloudTestCase {
     String expression;
     TupleStream stream;
     List<Tuple> tuples;
-    
-    // Basic test for no alias
-    expression =
-              "innerJoin("
-            + "  select("
-            + "    search(" + COLLECTIONORALIAS + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
-            + "    personId_i as personId,"
-            + "    rating_f as rating"
-            + "  ),"
-            + "  select("
-            + "    jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", sort=\"ID asc\"),"
-            + "    ID as personId,"
-            + "    NAME as personName,"
-            + "    COUNTRY_NAME as country"
-            + "  ),"
-            + "  on=\"personId\""
-            + ")";
-
-    stream = factory.constructStream(expression);
-    tuples = getTuples(stream);
-    
-    assertEquals(10, tuples.size());
-    assertOrderOf(tuples, "personId", 11,12,13,14,15,16,17,18,19,20);
-    assertOrderOf(tuples, "rating", 3.5d,5d,2.2d,4.3d,3.5d,3d,3d,4d,4.1d,4.8d);
-    assertOrderOf(tuples, "personName", "Emma","Grace","Hailey","Isabella","Lily","Madison","Mia","Natalie","Olivia","Samantha");
-    assertOrderOf(tuples, "country", "Netherlands","United States","Netherlands","Netherlands","Netherlands","United States","United States","Netherlands","Netherlands","United States");
-    
-    // Basic test for alias
-    expression =   
-              "innerJoin("
-            + "  select("
-            + "    search(" + COLLECTIONORALIAS + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
-            + "    personId_i as personId,"
-            + "    rating_f as rating"
-            + "  ),"
-            + "  select("
-            + "    jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID as PERSONID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", sort=\"PERSONID asc\"),"
-            + "    PERSONID as personId,"
-            + "    NAME as personName,"
-            + "    COUNTRY_NAME as country"
-            + "  ),"
-            + "  on=\"personId\""
-            + ")";
-
-    stream = factory.constructStream(expression);
-    tuples = getTuples(stream);
-    
-    assertEquals(10, tuples.size());
-    assertOrderOf(tuples, "personId", 11,12,13,14,15,16,17,18,19,20);
-    assertOrderOf(tuples, "rating", 3.5d,5d,2.2d,4.3d,3.5d,3d,3d,4d,4.1d,4.8d);
-    assertOrderOf(tuples, "personName", "Emma","Grace","Hailey","Isabella","Lily","Madison","Mia","Natalie","Olivia","Samantha");
-    assertOrderOf(tuples, "country", "Netherlands","United States","Netherlands","Netherlands","Netherlands","United States","United States","Netherlands","Netherlands","United States");
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
+    try {
+      // Basic test for no alias
+      expression =
+          "innerJoin("
+              + "  select("
+              + "    search(" + COLLECTIONORALIAS + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
+              + "    personId_i as personId,"
+              + "    rating_f as rating"
+              + "  ),"
+              + "  select("
+              + "    jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", sort=\"ID asc\"),"
+              + "    ID as personId,"
+              + "    NAME as personName,"
+              + "    COUNTRY_NAME as country"
+              + "  ),"
+              + "  on=\"personId\""
+              + ")";
+
+      stream = factory.constructStream(expression);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assertEquals(10, tuples.size());
+      assertOrderOf(tuples, "personId", 11, 12, 13, 14, 15, 16, 17, 18, 19, 20);
+      assertOrderOf(tuples, "rating", 3.5d, 5d, 2.2d, 4.3d, 3.5d, 3d, 3d, 4d, 4.1d, 4.8d);
+      assertOrderOf(tuples, "personName", "Emma", "Grace", "Hailey", "Isabella", "Lily", "Madison", "Mia", "Natalie", "Olivia", "Samantha");
+      assertOrderOf(tuples, "country", "Netherlands", "United States", "Netherlands", "Netherlands", "Netherlands", "United States", "United States", "Netherlands", "Netherlands", "United States");
+
+      // Basic test for alias
+      expression =
+          "innerJoin("
+              + "  select("
+              + "    search(" + COLLECTIONORALIAS + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
+              + "    personId_i as personId,"
+              + "    rating_f as rating"
+              + "  ),"
+              + "  select("
+              + "    jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID as PERSONID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", sort=\"PERSONID asc\"),"
+              + "    PERSONID as personId,"
+              + "    NAME as personName,"
+              + "    COUNTRY_NAME as country"
+              + "  ),"
+              + "  on=\"personId\""
+              + ")";
+
+      stream = factory.constructStream(expression);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assertEquals(10, tuples.size());
+      assertOrderOf(tuples, "personId", 11, 12, 13, 14, 15, 16, 17, 18, 19, 20);
+      assertOrderOf(tuples, "rating", 3.5d, 5d, 2.2d, 4.3d, 3.5d, 3d, 3d, 4d, 4.1d, 4.8d);
+      assertOrderOf(tuples, "personName", "Emma", "Grace", "Hailey", "Isabella", "Lily", "Madison", "Mia", "Natalie", "Olivia", "Samantha");
+      assertOrderOf(tuples, "country", "Netherlands", "United States", "Netherlands", "Netherlands", "Netherlands", "United States", "United States", "Netherlands", "Netherlands", "United States");
+    } finally {
+      solrClientCache.close();
+    }
   }
 
   @Test
@@ -439,7 +469,7 @@ public class JDBCStreamTest extends SolrCloudTestCase {
       statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (19,'Olivia','NL')");
       statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (20,'Samantha','US')");
     }
-    
+
     // Load solr data
     new UpdateRequest()
         .add(id, "1", "rating_f", "3.5", "personId_i", "11")
@@ -457,50 +487,58 @@ public class JDBCStreamTest extends SolrCloudTestCase {
     String expression;
     TupleStream stream;
     List<Tuple> tuples;
-    
-    // Basic test
-    expression =   
-              "rollup("
-            + "  hashJoin("
-            + "    hashed=select("
-            + "      search(" + COLLECTIONORALIAS + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
-            + "      personId_i as personId,"
-            + "      rating_f as rating"
-            + "    ),"
-            + "    select("
-            + "      jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by COUNTRIES.COUNTRY_NAME\", sort=\"COUNTRIES.COUNTRY_NAME asc\"),"
-            + "      ID as personId,"
-            + "      NAME as personName,"
-            + "      COUNTRY_NAME as country"
-            + "    ),"
-            + "    on=\"personId\""
-            + "  ),"
-            + "  over=\"country\","
-            + "  max(rating),"
-            + "  min(rating),"
-            + "  avg(rating),"
-            + "  count(*)"
-            + ")";
-
-    stream = factory.constructStream(expression);
-    tuples = getTuples(stream);
-    
-    assertEquals(2, tuples.size());
-    
-    Tuple tuple = tuples.get(0);
-    assertEquals("Netherlands",tuple.getString("country"));
-    assertTrue(4.3D == tuple.getDouble("max(rating)"));
-    assertTrue(2.2D == tuple.getDouble("min(rating)"));
-    assertTrue(3.6D == tuple.getDouble("avg(rating)"));
-    assertTrue(6D == tuple.getDouble("count(*)"));
-    
-    tuple = tuples.get(1);
-    assertEquals("United States",tuple.getString("country"));
-    assertTrue(5D == tuple.getDouble("max(rating)"));
-    assertTrue(3D == tuple.getDouble("min(rating)"));
-    assertTrue(3.95D == tuple.getDouble("avg(rating)"));
-    assertTrue(4D == tuple.getDouble("count(*)"));
-    
+
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
+    try {
+      // Basic test
+      expression =
+          "rollup("
+              + "  hashJoin("
+              + "    hashed=select("
+              + "      search(" + COLLECTIONORALIAS + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
+              + "      personId_i as personId,"
+              + "      rating_f as rating"
+              + "    ),"
+              + "    select("
+              + "      jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by COUNTRIES.COUNTRY_NAME\", sort=\"COUNTRIES.COUNTRY_NAME asc\"),"
+              + "      ID as personId,"
+              + "      NAME as personName,"
+              + "      COUNTRY_NAME as country"
+              + "    ),"
+              + "    on=\"personId\""
+              + "  ),"
+              + "  over=\"country\","
+              + "  max(rating),"
+              + "  min(rating),"
+              + "  avg(rating),"
+              + "  count(*)"
+              + ")";
+
+      stream = factory.constructStream(expression);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assertEquals(2, tuples.size());
+
+      Tuple tuple = tuples.get(0);
+      assertEquals("Netherlands", tuple.getString("country"));
+      assertTrue(4.3D == tuple.getDouble("max(rating)"));
+      assertTrue(2.2D == tuple.getDouble("min(rating)"));
+      assertTrue(3.6D == tuple.getDouble("avg(rating)"));
+      assertTrue(6D == tuple.getDouble("count(*)"));
+
+      tuple = tuples.get(1);
+      assertEquals("United States", tuple.getString("country"));
+      assertTrue(5D == tuple.getDouble("max(rating)"));
+      assertTrue(3D == tuple.getDouble("min(rating)"));
+      assertTrue(3.95D == tuple.getDouble("avg(rating)"));
+      assertTrue(4D == tuple.getDouble("count(*)"));
+    } finally {
+      solrClientCache.close();
+    }
   }
   
   @Test(expected=IOException.class)

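For context, the recurring change in these stream tests is a single resource
management pattern, sketched below; "expression", "factory" and "getTuples"
are the tests' own helpers, everything else comes from this patch:

    // Streams now obtain SolrClients through a shared, closeable cache
    // attached to the StreamContext.
    StreamContext streamContext = new StreamContext();
    SolrClientCache solrClientCache = new SolrClientCache();
    streamContext.setSolrClientCache(solrClientCache);
    try {
      TupleStream stream = factory.constructStream(expression);
      stream.setStreamContext(streamContext);
      List<Tuple> tuples = getTuples(stream);
      // ... assertions on tuples ...
    } finally {
      solrClientCache.close(); // release the cached clients
    }
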
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/SelectWithEvaluatorsTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/SelectWithEvaluatorsTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/SelectWithEvaluatorsTest.java
index b91df8d..75bf92d 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/SelectWithEvaluatorsTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/SelectWithEvaluatorsTest.java
@@ -24,6 +24,7 @@ import java.util.Map;
 
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
+import org.apache.solr.client.solrj.io.SolrClientCache;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.eval.AddEvaluator;
 import org.apache.solr.client.solrj.io.eval.GreaterThanEvaluator;
@@ -92,6 +93,9 @@ public class SelectWithEvaluatorsTest extends SolrCloudTestCase {
     String clause;
     TupleStream stream;
     List<Tuple> tuples;
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
     
     StreamFactory factory = new StreamFactory()
       .withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
@@ -101,21 +105,24 @@ public class SelectWithEvaluatorsTest extends SolrCloudTestCase {
       .withFunctionName("if", IfThenElseEvaluator.class)
       .withFunctionName("gt", GreaterThanEvaluator.class)
       ;
-    
-    // Basic test
-    clause = "select("
-            +   "id,"
-            +   "add(b_i,c_d) as result,"
-            +   "search(collection1, q=*:*, fl=\"id,a_s,b_i,c_d,d_b\", sort=\"id asc\")"
-            + ")";
-    stream = factory.constructStream(clause);
-    tuples = getTuples(stream);
-    assertFields(tuples, "id", "result");
-    assertNotFields(tuples, "a_s", "b_i", "c_d", "d_b");
-    assertEquals(1, tuples.size());
-    assertDouble(tuples.get(0), "result", 4.3);
-    assertEquals(4.3, tuples.get(0).get("result"));
-
+    try {
+      // Basic test
+      clause = "select("
+          + "id,"
+          + "add(b_i,c_d) as result,"
+          + "search(collection1, q=*:*, fl=\"id,a_s,b_i,c_d,d_b\", sort=\"id asc\")"
+          + ")";
+      stream = factory.constructStream(clause);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+      assertFields(tuples, "id", "result");
+      assertNotFields(tuples, "a_s", "b_i", "c_d", "d_b");
+      assertEquals(1, tuples.size());
+      assertDouble(tuples.get(0), "result", 4.3);
+      assertEquals(4.3, tuples.get(0).get("result"));
+    } finally {
+      solrClientCache.close();
+    }
   }
   
   protected List<Tuple> getTuples(TupleStream tupleStream) throws IOException {


[16/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
index de39a61..8474f55 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
@@ -134,7 +134,7 @@ public class MetricsCollectorHandler extends RequestHandlerBase {
 
   @Override
   public String getDescription() {
-    return "Handler for collecting and aggregating metric reports.";
+    return "Handler for collecting and aggregating SolrCloud metric reports.";
   }
 
   private static class MetricUpdateProcessor extends UpdateRequestProcessor {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
index 4dc86d9..9dda6ae 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
@@ -19,6 +19,7 @@ package org.apache.solr.handler.admin;
 
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.EnumSet;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -52,6 +53,14 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
   final SolrMetricManager metricManager;
 
   public static final String COMPACT_PARAM = "compact";
+  public static final String PREFIX_PARAM = "prefix";
+  public static final String REGEX_PARAM = "regex";
+  public static final String PROPERTY_PARAM = "property";
+  public static final String REGISTRY_PARAM = "registry";
+  public static final String GROUP_PARAM = "group";
+  public static final String TYPE_PARAM = "type";
+
+  public static final String ALL = "all";
 
   public MetricsHandler() {
     this.container = null;
@@ -76,6 +85,7 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
 
     boolean compact = req.getParams().getBool(COMPACT_PARAM, false);
     MetricFilter mustMatchFilter = parseMustMatchFilter(req);
+    MetricUtils.PropertyFilter propertyFilter = parsePropertyFilter(req);
     List<MetricType> metricTypes = parseMetricTypes(req);
     List<MetricFilter> metricFilters = metricTypes.stream().map(MetricType::asMetricFilter).collect(Collectors.toList());
     Set<String> requestedRegistries = parseRegistries(req);
@@ -83,30 +93,67 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
     NamedList response = new SimpleOrderedMap();
     for (String registryName : requestedRegistries) {
       MetricRegistry registry = metricManager.registry(registryName);
-      response.add(registryName, MetricUtils.toNamedList(registry, metricFilters, mustMatchFilter, false,
-          false, compact, null));
+      SimpleOrderedMap result = new SimpleOrderedMap();
+      MetricUtils.toMaps(registry, metricFilters, mustMatchFilter, propertyFilter, false,
+          false, compact, false, (k, v) -> result.add(k, v));
+      if (result.size() > 0) {
+        response.add(registryName, result);
+      }
     }
     rsp.getValues().add("metrics", response);
   }
 
   private MetricFilter parseMustMatchFilter(SolrQueryRequest req) {
-    String[] prefixes = req.getParams().getParams("prefix");
-    MetricFilter mustMatchFilter;
+    String[] prefixes = req.getParams().getParams(PREFIX_PARAM);
+    MetricFilter prefixFilter = null;
     if (prefixes != null && prefixes.length > 0) {
       Set<String> prefixSet = new HashSet<>();
       for (String prefix : prefixes) {
         prefixSet.addAll(StrUtils.splitSmart(prefix, ','));
       }
-      mustMatchFilter = new SolrMetricManager.PrefixFilter((String[])prefixSet.toArray(new String[prefixSet.size()]));
-    } else  {
+      prefixFilter = new SolrMetricManager.PrefixFilter(prefixSet);
+    }
+    String[] regexes = req.getParams().getParams(REGEX_PARAM);
+    MetricFilter regexFilter = null;
+    if (regexes != null && regexes.length > 0) {
+      regexFilter = new SolrMetricManager.RegexFilter(regexes);
+    }
+    MetricFilter mustMatchFilter;
+    if (prefixFilter == null && regexFilter == null) {
       mustMatchFilter = MetricFilter.ALL;
+    } else {
+      if (prefixFilter == null) {
+        mustMatchFilter = regexFilter;
+      } else if (regexFilter == null) {
+        mustMatchFilter = prefixFilter;
+      } else {
+        mustMatchFilter = new SolrMetricManager.OrFilter(prefixFilter, regexFilter);
+      }
     }
     return mustMatchFilter;
   }
 
+  private MetricUtils.PropertyFilter parsePropertyFilter(SolrQueryRequest req) {
+    String[] props = req.getParams().getParams(PROPERTY_PARAM);
+    if (props == null || props.length == 0) {
+      return MetricUtils.PropertyFilter.ALL;
+    }
+    final Set<String> filter = new HashSet<>();
+    for (String prop : props) {
+      if (prop != null && !prop.trim().isEmpty()) {
+        filter.add(prop.trim());
+      }
+    }
+    if (filter.isEmpty()) {
+      return MetricUtils.PropertyFilter.ALL;
+    } else {
+      return (name) -> filter.contains(name);
+    }
+  }
+
   private Set<String> parseRegistries(SolrQueryRequest req) {
-    String[] groupStr = req.getParams().getParams("group");
-    String[] registryStr = req.getParams().getParams("registry");
+    String[] groupStr = req.getParams().getParams(GROUP_PARAM);
+    String[] registryStr = req.getParams().getParams(REGISTRY_PARAM);
     if ((groupStr == null || groupStr.length == 0) && (registryStr == null || registryStr.length == 0)) {
       // return all registries
       return container.getMetricManager().registryNames();
@@ -118,7 +165,7 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
       for (String g : groupStr) {
         List<String> split = StrUtils.splitSmart(g, ',');
         for (String s : split) {
-          if (s.trim().equals("all")) {
+          if (s.trim().equals(ALL)) {
             allRegistries = true;
             break;
           }
@@ -137,7 +184,7 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
       for (String r : registryStr) {
         List<String> split = StrUtils.splitSmart(r, ',');
         for (String s : split) {
-          if (s.trim().equals("all")) {
+          if (s.trim().equals(ALL)) {
             allRegistries = true;
             break;
           }
@@ -161,7 +208,7 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
   }
 
   private List<MetricType> parseMetricTypes(SolrQueryRequest req) {
-    String[] typeStr = req.getParams().getParams("type");
+    String[] typeStr = req.getParams().getParams(TYPE_PARAM);
     List<String> types = Collections.emptyList();
     if (typeStr != null && typeStr.length > 0)  {
       types = new ArrayList<>();
@@ -176,7 +223,8 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
         metricTypes = types.stream().map(String::trim).map(MetricType::valueOf).collect(Collectors.toList());
       }
     } catch (IllegalArgumentException e) {
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid metric type in: " + types + " specified. Must be one of (all, meter, timer, histogram, counter, gauge)", e);
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid metric type in: " + types +
+          " specified. Must be one of " + MetricType.SUPPORTED_TYPES_MSG, e);
     }
     return metricTypes;
   }
@@ -199,6 +247,8 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
     gauge(Gauge.class),
     all(null);
 
+    public static final String SUPPORTED_TYPES_MSG = EnumSet.allOf(MetricType.class).toString();
+
     private final Class klass;
 
     MetricType(Class klass) {

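Taken together, the new parameters allow prefix, regex and property filtering
in a single call: prefix and regex filters are OR-ed into one must-match
filter, while the property filter restricts which fields of each metric are
emitted. A hypothetical request against this handler (conventionally mounted
at /admin/metrics) could look like:

    /admin/metrics?group=core&prefix=QUERY.&property=requests&property=errors&compact=true

where the group, prefix and property values are only illustrative.
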
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/PluginInfoHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/PluginInfoHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/PluginInfoHandler.java
index a096e79..8bdc478 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/PluginInfoHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/PluginInfoHandler.java
@@ -16,14 +16,12 @@
  */
 package org.apache.solr.handler.admin;
 
-import java.net.URL;
-import java.util.ArrayList;
 import java.util.Map;
 
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
@@ -48,13 +46,13 @@ public class PluginInfoHandler extends RequestHandlerBase
   private static SimpleOrderedMap<Object> getSolrInfoBeans( SolrCore core, boolean stats )
   {
     SimpleOrderedMap<Object> list = new SimpleOrderedMap<>();
-    for (SolrInfoMBean.Category cat : SolrInfoMBean.Category.values()) 
+    for (SolrInfoBean.Category cat : SolrInfoBean.Category.values())
     {
       SimpleOrderedMap<Object> category = new SimpleOrderedMap<>();
       list.add( cat.name(), category );
-      Map<String, SolrInfoMBean> reg = core.getInfoRegistry();
-      for (Map.Entry<String,SolrInfoMBean> entry : reg.entrySet()) {
-        SolrInfoMBean m = entry.getValue();
+      Map<String, SolrInfoBean> reg = core.getInfoRegistry();
+      for (Map.Entry<String,SolrInfoBean> entry : reg.entrySet()) {
+        SolrInfoBean m = entry.getValue();
         if (m.getCategory() != cat) continue;
 
         String na = "Not Declared";
@@ -62,21 +60,10 @@ public class PluginInfoHandler extends RequestHandlerBase
         category.add( entry.getKey(), info );
 
         info.add( NAME,          (m.getName()       !=null ? m.getName()        : na) );
-        info.add( "version",     (m.getVersion()    !=null ? m.getVersion()     : na) );
         info.add( "description", (m.getDescription()!=null ? m.getDescription() : na) );
-        info.add( "source",      (m.getSource()     !=null ? m.getSource()      : na) );
 
-        URL[] urls = m.getDocs();
-        if ((urls != null) && (urls.length > 0)) {
-          ArrayList<String> docs = new ArrayList<>(urls.length);
-          for( URL u : urls ) {
-            docs.add( u.toExternalForm() );
-          }
-          info.add( "docs", docs );
-        }
-
-        if( stats ) {
-          info.add( "stats", m.getStatistics() );
+        if (stats) {
+          info.add( "stats", m.getMetricsSnapshot());
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java b/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java
index d370bef..0c2c903 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java
@@ -196,7 +196,7 @@ class PrepRecoveryOp implements CoreAdminHandler.CoreAdminOp {
                 .getNewestSearcher(false);
             SolrIndexSearcher searcher = searchHolder.get();
             try {
-              log.debug(core.getCoreDescriptor().getCoreContainer()
+              log.debug(core.getCoreContainer()
                   .getZkController().getNodeName()
                   + " to replicate "
                   + searcher.search(new MatchAllDocsQuery(), 1).totalHits

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/RequestSyncShardOp.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/RequestSyncShardOp.java b/solr/core/src/java/org/apache/solr/handler/admin/RequestSyncShardOp.java
index 584a7ca..dd4ae38 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/RequestSyncShardOp.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/RequestSyncShardOp.java
@@ -58,7 +58,7 @@ class RequestSyncShardOp implements CoreAdminHandler.CoreAdminOp {
     try (SolrCore core = it.handler.coreContainer.getCore(cname)) {
 
       if (core != null) {
-        syncStrategy = new SyncStrategy(core.getCoreDescriptor().getCoreContainer());
+        syncStrategy = new SyncStrategy(core.getCoreContainer());
 
         Map<String, Object> props = new HashMap<>();
         props.put(ZkStateReader.BASE_URL_PROP, zkController.getBaseUrl());
@@ -73,7 +73,7 @@ class RequestSyncShardOp implements CoreAdminHandler.CoreAdminOp {
                 .getNewestSearcher(false);
             SolrIndexSearcher searcher = searchHolder.get();
             try {
-              log.debug(core.getCoreDescriptor().getCoreContainer()
+              log.debug(core.getCoreContainer()
                   .getZkController().getNodeName()
                   + " synched "
                   + searcher.search(new MatchAllDocsQuery(), 1).totalHits);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java
index 805a690..02577f1 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/ShowFileRequestHandler.java
@@ -127,7 +127,7 @@ public class ShowFileRequestHandler extends RequestHandlerBase
   public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp)
       throws InterruptedException, KeeperException, IOException {
 
-    CoreContainer coreContainer = req.getCore().getCoreDescriptor().getCoreContainer();
+    CoreContainer coreContainer = req.getCore().getCoreContainer();
     if (coreContainer.isZooKeeperAware()) {
       showFromZooKeeper(req, rsp, coreContainer);
     } else {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/SolrInfoMBeanHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SolrInfoMBeanHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/SolrInfoMBeanHandler.java
index f5f28c5..4faa466 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SolrInfoMBeanHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SolrInfoMBeanHandler.java
@@ -20,7 +20,7 @@ import org.apache.commons.io.IOUtils;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.client.solrj.impl.XMLResponseParser;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.util.ContentStream;
@@ -30,10 +30,7 @@ import org.apache.solr.response.BinaryResponseWriter;
 import org.apache.solr.response.SolrQueryResponse;
 
 import java.io.StringReader;
-import java.net.URL;
 import java.text.NumberFormat;
-import java.util.ArrayList;
-import java.util.List;
 import java.util.Locale;
 import java.util.Set;
 import java.util.Map;
@@ -117,7 +114,7 @@ public class SolrInfoMBeanHandler extends RequestHandlerBase {
     
     String[] requestedCats = req.getParams().getParams("cat");
     if (null == requestedCats || 0 == requestedCats.length) {
-      for (SolrInfoMBean.Category cat : SolrInfoMBean.Category.values()) {
+      for (SolrInfoBean.Category cat : SolrInfoBean.Category.values()) {
         cats.add(cat.name(), new SimpleOrderedMap<NamedList<Object>>());
       }
     } else {
@@ -128,39 +125,27 @@ public class SolrInfoMBeanHandler extends RequestHandlerBase {
          
     Set<String> requestedKeys = arrayToSet(req.getParams().getParams("key"));
     
-    Map<String, SolrInfoMBean> reg = req.getCore().getInfoRegistry();
-    for (Map.Entry<String, SolrInfoMBean> entry : reg.entrySet()) {
+    Map<String, SolrInfoBean> reg = req.getCore().getInfoRegistry();
+    for (Map.Entry<String, SolrInfoBean> entry : reg.entrySet()) {
       addMBean(req, cats, requestedKeys, entry.getKey(),entry.getValue());
     }
 
-    for (SolrInfoMBean infoMBean : req.getCore().getCoreDescriptor().getCoreContainer().getResourceLoader().getInfoMBeans()) {
+    for (SolrInfoBean infoMBean : req.getCore().getCoreContainer().getResourceLoader().getInfoMBeans()) {
       addMBean(req,cats,requestedKeys,infoMBean.getName(),infoMBean);
     }
     return cats;
   }
 
-  private void addMBean(SolrQueryRequest req, NamedList<NamedList<NamedList<Object>>> cats, Set<String> requestedKeys, String key, SolrInfoMBean m) {
+  private void addMBean(SolrQueryRequest req, NamedList<NamedList<NamedList<Object>>> cats, Set<String> requestedKeys, String key, SolrInfoBean m) {
     if ( ! ( requestedKeys.isEmpty() || requestedKeys.contains(key) ) ) return;
     NamedList<NamedList<Object>> catInfo = cats.get(m.getCategory().name());
     if ( null == catInfo ) return;
     NamedList<Object> mBeanInfo = new SimpleOrderedMap<>();
     mBeanInfo.add("class", m.getName());
-    mBeanInfo.add("version", m.getVersion());
     mBeanInfo.add("description", m.getDescription());
-    mBeanInfo.add("src", m.getSource());
-
-    // Use an external form
-    URL[] urls = m.getDocs();
-    if(urls!=null) {
-      List<String> docs = new ArrayList<>(urls.length);
-      for(URL url : urls) {
-        docs.add(url.toExternalForm());
-      }
-      mBeanInfo.add("docs", docs);
-    }
 
     if (req.getParams().getFieldBool(key, "stats", false))
-      mBeanInfo.add("stats", m.getStatistics());
+      mBeanInfo.add("stats", m.getMetricsSnapshot());
 
     catInfo.add(key, mBeanInfo);
   }
@@ -246,6 +231,9 @@ public class SolrInfoMBeanHandler extends RequestHandlerBase {
   }
   
   public Object diffObject(Object ref, Object now) {
+    if (now instanceof Map) {
+      now = new NamedList((Map)now);
+    }
     if(ref instanceof NamedList) {
       return diffNamedList((NamedList)ref, (NamedList)now);
     }

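Across these admin handlers the migration follows one pattern: SolrInfoMBean
becomes SolrInfoBean, the version/source/docs fields that no longer exist on
the new interface are dropped, and per-bean statistics come from
getMetricsSnapshot() instead of getStatistics(). The diffObject() change above
also coerces a plain Map into a NamedList first, presumably because metrics
snapshots are now returned as Maps rather than NamedLists.
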
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java
index fc1679f..8a2786c 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SystemInfoHandler.java
@@ -16,10 +16,6 @@
  */
 package org.apache.solr.handler.admin;
 
-import java.beans.BeanInfo;
-import java.beans.IntrospectionException;
-import java.beans.Introspector;
-import java.beans.PropertyDescriptor;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
@@ -27,23 +23,20 @@ import java.io.InputStreamReader;
 import java.lang.invoke.MethodHandles;
 import java.lang.management.ManagementFactory;
 import java.lang.management.OperatingSystemMXBean;
-import java.lang.management.PlatformManagedObject;
 import java.lang.management.RuntimeMXBean;
-import java.lang.reflect.InvocationTargetException;
 import java.net.InetAddress;
 import java.nio.charset.Charset;
 import java.text.DecimalFormat;
 import java.text.DecimalFormatSymbols;
-import java.util.Arrays;
 import java.util.Date;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Locale;
 
+import com.codahale.metrics.Gauge;
 import org.apache.commons.io.IOUtils;
 import org.apache.lucene.LucenePackage;
 import org.apache.lucene.util.Constants;
-import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
@@ -53,6 +46,7 @@ import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.util.RTimer;
 import org.apache.solr.util.RedactionUtils;
+import org.apache.solr.util.stats.MetricUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -154,7 +148,7 @@ public class SystemInfoHandler extends RequestHandlerBase
   private CoreContainer getCoreContainer(SolrQueryRequest req, SolrCore core) {
     CoreContainer coreContainer;
     if (core != null) {
-       coreContainer = req.getCore().getCoreDescriptor().getCoreContainer();
+       coreContainer = req.getCore().getCoreContainer();
     } else {
       coreContainer = cc;
     }
@@ -207,29 +201,13 @@ public class SystemInfoHandler extends RequestHandlerBase
     
     OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean();
     info.add(NAME, os.getName()); // add at least this one
-    try {
-      // add remaining ones dynamically using Java Beans API
-      addMXBeanProperties(os, OperatingSystemMXBean.class, info);
-    } catch (IntrospectionException | ReflectiveOperationException e) {
-      log.warn("Unable to fetch properties of OperatingSystemMXBean.", e);
-    }
-
-    // There are some additional beans we want to add (not available on all JVMs):
-    for (String clazz : Arrays.asList(
-        "com.sun.management.OperatingSystemMXBean",
-        "com.sun.management.UnixOperatingSystemMXBean", 
-        "com.ibm.lang.management.OperatingSystemMXBean"
-    )) {
-      try {
-        final Class<? extends PlatformManagedObject> intf = Class.forName(clazz)
-            .asSubclass(PlatformManagedObject.class);
-        addMXBeanProperties(os, intf, info);
-      } catch (ClassNotFoundException e) {
-        // ignore
-      } catch (IntrospectionException | ReflectiveOperationException e) {
-        log.warn("Unable to fetch properties of JVM-specific OperatingSystemMXBean.", e);
+    // add remaining ones dynamically using Java Beans API
+    // also those from JVM implementation-specific classes
+    MetricUtils.addMXBeanMetrics(os, MetricUtils.OS_MXBEAN_CLASSES, null, (name, metric) -> {
+      if (info.get(name) == null) {
+        info.add(name, ((Gauge) metric).getValue());
       }
-    }
+    });
 
     // Try some command line things:
     try { 
@@ -244,34 +222,6 @@ public class SystemInfoHandler extends RequestHandlerBase
   }
   
   /**
-   * Add all bean properties of a {@link PlatformManagedObject} to the given {@link NamedList}.
-   * <p>
-   * If you are running a OpenJDK/Oracle JVM, there are nice properties in:
-   * {@code com.sun.management.UnixOperatingSystemMXBean} and
-   * {@code com.sun.management.OperatingSystemMXBean}
-   */
-  static <T extends PlatformManagedObject> void addMXBeanProperties(T obj, Class<? extends T> intf, NamedList<Object> info)
-      throws IntrospectionException, ReflectiveOperationException {
-    if (intf.isInstance(obj)) {
-      final BeanInfo beanInfo = Introspector.getBeanInfo(intf, intf.getSuperclass(), Introspector.IGNORE_ALL_BEANINFO);
-      for (final PropertyDescriptor desc : beanInfo.getPropertyDescriptors()) {
-        final String name = desc.getName();
-        if (info.get(name) == null) {
-          try {
-            final Object v = desc.getReadMethod().invoke(obj);
-            if(v != null) {
-              info.add(name, v);
-            }
-          } catch (InvocationTargetException ite) {
-            // ignore (some properties throw UOE)
-          }
-        }
-      }
-    }
-  }
-  
-  
-  /**
    * Utility function to execute a function
    */
   private static String execute( String cmd )

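Net effect of this hunk: the hand-rolled Java Beans introspection over
OperatingSystemMXBean and the JVM-specific classes listed in the removed code
(com.sun.management.OperatingSystemMXBean,
com.sun.management.UnixOperatingSystemMXBean,
com.ibm.lang.management.OperatingSystemMXBean) is centralized in
MetricUtils.addMXBeanMetrics, which reports each readable property as a Gauge;
presumably MetricUtils.OS_MXBEAN_CLASSES covers the same class list.
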
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java b/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java
index be21733..1f398a9 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java
@@ -17,7 +17,6 @@
 package org.apache.solr.handler.component;
 
 import java.io.IOException;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -161,7 +160,7 @@ public class DebugComponent extends SearchComponent
 
   @SuppressForbidden(reason = "Need currentTimeMillis, only used for naming")
   private String generateRid(ResponseBuilder rb) {
-    String hostName = rb.req.getCore().getCoreDescriptor().getCoreContainer().getHostName();
+    String hostName = rb.req.getCore().getCoreContainer().getHostName();
     return hostName + "-" + rb.req.getCore().getName() + "-" + System.currentTimeMillis() + "-" + ridCounter.getAndIncrement();
   }
 
@@ -380,7 +379,7 @@ public class DebugComponent extends SearchComponent
 
   
   /////////////////////////////////////////////
-  ///  SolrInfoMBean
+  ///  SolrInfoBean
   ////////////////////////////////////////////
 
   @Override
@@ -392,9 +391,4 @@ public class DebugComponent extends SearchComponent
   public Category getCategory() {
     return Category.OTHER;
   }
-
-  @Override
-  public URL[] getDocs() {
-    return null;
-  }
 }

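The same SolrInfoBean migration continues on the component side below: with
getDocs() gone from the interface, each SearchComponent simply drops its
override (DebugComponent above; ExpandComponent, FacetComponent and
HighlightComponent in the following hunks).
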
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
index 656ac71..2519a47 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
@@ -17,8 +17,6 @@
 package org.apache.solr.handler.component;
 
 import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -764,7 +762,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
 
 
   ////////////////////////////////////////////
-  ///  SolrInfoMBean
+  ///  SolrInfoBean
   ////////////////////////////////////////////
 
   @Override
@@ -777,17 +775,6 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
     return Category.QUERY;
   }
 
-  @Override
-  public URL[] getDocs() {
-    try {
-      return new URL[]{
-          new URL("http://wiki.apache.org/solr/ExpandComponent")
-      };
-    } catch (MalformedURLException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
   // this reader alters the content of the given reader so it should not
   // delegate the caching stuff
   private static class ReaderWrapper extends FilterLeafReader {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/FacetComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/FacetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/FacetComponent.java
index 66b9ab8..80cca15 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/FacetComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/FacetComponent.java
@@ -18,7 +18,6 @@ package org.apache.solr.handler.component;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -1212,7 +1211,7 @@ public class FacetComponent extends SearchComponent {
 
 
   /////////////////////////////////////////////
-  ///  SolrInfoMBean
+  ///  SolrInfoBean
   ////////////////////////////////////////////
 
   @Override
@@ -1225,11 +1224,6 @@ public class FacetComponent extends SearchComponent {
     return Category.QUERY;
   }
 
-  @Override
-  public URL[] getDocs() {
-    return null;
-  }
-
   /**
    * This class is used exclusively for merging results from each shard
    * in a distributed facet request. It plays no role in the computation

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/HighlightComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/HighlightComponent.java b/solr/core/src/java/org/apache/solr/handler/component/HighlightComponent.java
index d147be2..cc5211b 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/HighlightComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/HighlightComponent.java
@@ -17,7 +17,6 @@
 package org.apache.solr.handler.component;
 
 import java.io.IOException;
-import java.net.URL;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -266,7 +265,7 @@ public class HighlightComponent extends SearchComponent implements PluginInfoIni
   }
 
   ////////////////////////////////////////////
-  ///  SolrInfoMBean
+  ///  SolrInfoBean
   ////////////////////////////////////////////
   
   @Override
@@ -278,9 +277,4 @@ public class HighlightComponent extends SearchComponent implements PluginInfoIni
   public Category getCategory() {
     return Category.HIGHLIGHTER;
   }
-  
-  @Override
-  public URL[] getDocs() {
-    return null;
-  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
index 40e17a9..8c0a9cb 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
@@ -279,7 +279,7 @@ public class HttpShardHandler extends ShardHandler {
     Map<String,Slice> slices = null;
     CoreDescriptor coreDescriptor = req.getCore().getCoreDescriptor();
     CloudDescriptor cloudDescriptor = coreDescriptor.getCloudDescriptor();
-    ZkController zkController = coreDescriptor.getCoreContainer().getZkController();
+    ZkController zkController = req.getCore().getCoreContainer().getZkController();
 
     final ReplicaListTransformer replicaListTransformer = httpShardHandlerFactory.getReplicaListTransformer(req);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
index 1c016c7..e3787cd 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
@@ -36,7 +36,7 @@ import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.URLUtil;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.PluginInfo;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
 import org.apache.solr.update.UpdateShardHandlerConfig;
@@ -124,10 +124,6 @@ public class HttpShardHandlerFactory extends ShardHandlerFactory implements org.
 
   // Configure if the threadpool favours fairness over throughput
   static final String INIT_FAIRNESS_POLICY = "fairnessPolicy";
-  
-  // Turn on retries for certain IOExceptions, many of which can happen
-  // due to connection pooling limitations / races
-  static final String USE_RETRIES = "useRetries";
 
   /**
    * Get {@link ShardHandler} that uses the default http client.
@@ -324,7 +320,7 @@ public class HttpShardHandlerFactory extends ShardHandlerFactory implements org.
 
     if (params.getBool(CommonParams.PREFER_LOCAL_SHARDS, false)) {
       final CoreDescriptor coreDescriptor = req.getCore().getCoreDescriptor();
-      final ZkController zkController = coreDescriptor.getCoreContainer().getZkController();
+      final ZkController zkController = req.getCore().getCoreContainer().getZkController();
       final String preferredHostAddress = (zkController != null) ? zkController.getBaseUrl() : null;
       if (preferredHostAddress == null) {
         log.warn("Couldn't determine current host address to prefer local shards");
@@ -377,10 +373,10 @@ public class HttpShardHandlerFactory extends ShardHandlerFactory implements org.
 
   @Override
   public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
-    String expandedScope = SolrMetricManager.mkName(scope, SolrInfoMBean.Category.QUERY.name());
+    String expandedScope = SolrMetricManager.mkName(scope, SolrInfoBean.Category.QUERY.name());
     clientConnectionManager.initializeMetrics(manager, registry, expandedScope);
     httpRequestExecutor.initializeMetrics(manager, registry, expandedScope);
-    commExecutor = MetricUtils.instrumentedExecutorService(commExecutor,
+    commExecutor = MetricUtils.instrumentedExecutorService(commExecutor, null,
         manager.registry(registry),
         SolrMetricManager.mkName("httpShardExecutor", expandedScope, "threadPool"));
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java b/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
index ffb5858..fd9d37d 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/MoreLikeThisComponent.java
@@ -18,7 +18,6 @@ package org.apache.solr.handler.component;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -414,7 +413,7 @@ public class MoreLikeThisComponent extends SearchComponent {
   }
   
   // ///////////////////////////////////////////
-  // / SolrInfoMBean
+  // / SolrInfoBean
   // //////////////////////////////////////////
   
   @Override
@@ -426,9 +425,4 @@ public class MoreLikeThisComponent extends SearchComponent {
   public Category getCategory() {
     return Category.QUERY;
   }
-
-  @Override
-  public URL[] getDocs() {
-    return null;
-  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
index 08a0e84..1d88adc 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
@@ -20,7 +20,6 @@ import java.io.IOException;
 import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.lang.invoke.MethodHandles;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -186,11 +185,6 @@ public class QueryComponent extends SearchComponent
       }
 
       rb.setSortSpec( parser.getSortSpec(true) );
-      for (SchemaField sf:rb.getSortSpec().getSchemaFields()) {
-        if (sf != null && sf.getType().isPointField() && !sf.hasDocValues()) {
-          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,"Can't sort on a point field without docValues");
-        }
-      }
       rb.setQparser(parser);
 
       final String cursorStr = rb.req.getParams().get(CursorMarkParams.CURSOR_MARK_PARAM);
@@ -1378,7 +1372,7 @@ public class QueryComponent extends SearchComponent
   }
 
   /////////////////////////////////////////////
-  ///  SolrInfoMBean
+  ///  SolrInfoBean
   ////////////////////////////////////////////
 
   @Override
@@ -1391,11 +1385,6 @@ public class QueryComponent extends SearchComponent
     return Category.QUERY;
   }
 
-  @Override
-  public URL[] getDocs() {
-    return null;
-  }
-
   /**
    * Fake scorer for a single document
    *

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
index c12902e..4fe1d71 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
@@ -24,8 +24,6 @@ import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.lang.invoke.MethodHandles;
-import java.net.MalformedURLException;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -219,7 +217,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
         boolean exists = false;
 
         // check if using ZooKeeper
-        ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
+        ZkController zkController = core.getCoreContainer().getZkController();
         if (zkController != null) {
           // TODO : shouldn't have to keep reading the config name when it has been read before
           exists = zkController.configFileExists(zkController.getZkStateReader().readConfigName(core.getCoreDescriptor().getCloudDescriptor().getCollectionName()), f);
@@ -274,7 +272,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
         
         Config cfg;
         
-        ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
+        ZkController zkController = core.getCoreContainer().getZkController();
         if (zkController != null) {
           cfg = new Config(core.getResourceLoader(), f, null, null);
         } else {
@@ -597,7 +595,7 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
   }
 
   //---------------------------------------------------------------------------------
-  // SolrInfoMBean
+  // SolrInfoBean
   //---------------------------------------------------------------------------------
 
   @Override
@@ -605,16 +603,6 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
     return "Query Boosting -- boost particular documents for a given query";
   }
 
-  @Override
-  public URL[] getDocs() {
-    try {
-      return new URL[]{
-          new URL("http://wiki.apache.org/solr/QueryElevationComponent")
-      };
-    } catch (MalformedURLException e) {
-      throw new RuntimeException(e);
-    }
-  }
   class ElevationComparatorSource extends FieldComparatorSource {
   private QueryElevationComponent.ElevationObj elevations;
   private SentinelIntSet ordSet; //the key half of the map

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
index 882decb..c0ceddb 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
@@ -18,7 +18,6 @@ package org.apache.solr.handler.component;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -140,7 +139,7 @@ public class RealTimeGetComponent extends SearchComponent
               .getNewestSearcher(false);
           SolrIndexSearcher searcher = searchHolder.get();
           try {
-            log.debug(req.getCore().getCoreDescriptor()
+            log.debug(req.getCore()
                 .getCoreContainer().getZkController().getNodeName()
                 + " min count to sync to (from most recent searcher view) "
                 + searcher.search(new MatchAllDocsQuery(), 1).totalHits);
@@ -761,7 +760,7 @@ public class RealTimeGetComponent extends SearchComponent
           out.add(f);
         }
       } else {
-        log.debug("Don't know how to handle field " + f);
+        log.debug("Don't know how to handle field {}", f);
       }
     }
 
@@ -789,7 +788,7 @@ public class RealTimeGetComponent extends SearchComponent
 
     // TODO: handle collection=...?
 
-    ZkController zkController = rb.req.getCore().getCoreDescriptor().getCoreContainer().getZkController();
+    ZkController zkController = rb.req.getCore().getCoreContainer().getZkController();
 
     // if shards=... then use that
     if (zkController != null && params.get(ShardParams.SHARDS) == null) {
@@ -927,7 +926,7 @@ public class RealTimeGetComponent extends SearchComponent
                                                                                                
 
   ////////////////////////////////////////////
-  ///  SolrInfoMBean
+  ///  SolrInfoBean
   ////////////////////////////////////////////
 
   @Override
@@ -940,13 +939,6 @@ public class RealTimeGetComponent extends SearchComponent
     return Category.QUERY;
   }
 
-  @Override
-  public URL[] getDocs() {
-    return null;
-  }
-
-  
-  
   public void processGetFingeprint(ResponseBuilder rb) throws IOException {
     SolrQueryRequest req = rb.req;
     SolrParams params = req.getParams();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/SearchComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SearchComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SearchComponent.java
index 6ef0ee4..c615c5a 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SearchComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SearchComponent.java
@@ -17,13 +17,15 @@
 package org.apache.solr.handler.component;
 
 import java.io.IOException;
-import java.net.URL;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
+import java.util.Set;
 
+import com.codahale.metrics.MetricRegistry;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.search.facet.FacetModule;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 
@@ -33,12 +35,16 @@ import org.apache.solr.util.plugin.NamedListInitializedPlugin;
  *
  * @since solr 1.3
  */
-public abstract class SearchComponent implements SolrInfoMBean, NamedListInitializedPlugin
+public abstract class SearchComponent implements SolrInfoBean, NamedListInitializedPlugin
 {
   /**
    * The name given to this component in solrconfig.xml file
    */
   private String name = this.getClass().getName();
+
+  protected Set<String> metricNames = new HashSet<>();
+  protected MetricRegistry registry;
+
   /**
    * Prepare the response.  Guaranteed to be called before any SearchComponent {@link #process(org.apache.solr.handler.component.ResponseBuilder)} method.
    * Called for every incoming request.
@@ -103,31 +109,24 @@ public abstract class SearchComponent implements SolrInfoMBean, NamedListInitial
 
   @Override
   public abstract String getDescription();
-  @Override
-  public String getSource() { return null; }
-  
-  @Override
-  public String getVersion() {
-    return getClass().getPackage().getSpecificationVersion();
-  }
-  
+
   @Override
   public Category getCategory() {
     return Category.OTHER;
   }
 
   @Override
-  public URL[] getDocs() {
-    return null;  // this can be overridden, but not required
+  public Set<String> getMetricNames() {
+    return metricNames;
   }
 
   @Override
-  public NamedList getStatistics() {
-    return null;
+  public MetricRegistry getMetricRegistry() {
+    return registry;
   }
 
   public static final Map<String, Class<? extends SearchComponent>> standard_components;
-  ;
+
 
   static {
     HashMap<String, Class<? extends SearchComponent>> map = new HashMap<>();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
index a00839b..8b732da 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
@@ -129,7 +129,7 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
         "First/Last components only valid if you do not declare 'components'");
 
     if (shfInfo == null) {
-      shardHandlerFactory = core.getCoreDescriptor().getCoreContainer().getShardHandlerFactory();
+      shardHandlerFactory = core.getCoreContainer().getShardHandlerFactory();
     } else {
       shardHandlerFactory = core.createInitInstance(shfInfo, ShardHandlerFactory.class, null, null);
       core.addCloseHook(new CloseHook() {
@@ -185,12 +185,12 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
         dbgCmp = (DebugComponent) comp;
       } else {
         components.add(comp);
-        log.debug("Adding  component:"+comp);
+        log.debug("Adding  component:{}", comp);
       }
     }
     if (makeDebugLast == true && dbgCmp != null){
       components.add(dbgCmp);
-      log.debug("Adding  debug component:" + dbgCmp);
+      log.debug("Adding  debug component:{}", dbgCmp);
     }
     this.components = components;
   }
@@ -211,7 +211,7 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
   private ShardHandler getAndPrepShardHandler(SolrQueryRequest req, ResponseBuilder rb) {
     ShardHandler shardHandler = null;
 
-    CoreContainer cc = req.getCore().getCoreDescriptor().getCoreContainer();
+    CoreContainer cc = req.getCore().getCoreContainer();
     boolean isZkAware = cc.isZooKeeperAware();
     rb.isDistrib = req.getParams().getBool(DISTRIB, isZkAware);
     if (!rb.isDistrib) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java
index 2f805f4..4e3cd12 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java
@@ -853,7 +853,7 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar
   }
 
   // ///////////////////////////////////////////
-  // / SolrInfoMBean
+  // / SolrInfoBean
   // //////////////////////////////////////////
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java b/solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java
index 6a6e9be..8ecd51c 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java
@@ -160,7 +160,7 @@ public class StatsComponent extends SearchComponent {
   }
 
   /////////////////////////////////////////////
-  ///  SolrInfoMBean
+  ///  SolrInfoBean
   ////////////////////////////////////////////
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
index bb87440..4ca6ce4 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
@@ -47,6 +47,9 @@ import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrEventListener;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.spelling.suggest.SolrSuggester;
 import org.apache.solr.spelling.suggest.SuggesterOptions;
@@ -61,7 +64,7 @@ import org.slf4j.LoggerFactory;
  * Responsible for routing commands and queries to the appropriate {@link SolrSuggester}
  * and for initializing them as specified by SolrConfig
  */
-public class SuggestComponent extends SearchComponent implements SolrCoreAware, SuggesterParams, Accountable {
+public class SuggestComponent extends SearchComponent implements SolrCoreAware, SuggesterParams, Accountable, SolrMetricProducer {
   private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   /** Name used to identify whether the user query concerns this component */
@@ -89,7 +92,7 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
    * Key is the dictionary name used in SolrConfig, value is the corresponding {@link SolrSuggester}
    */
   protected Map<String, SolrSuggester> suggesters = new ConcurrentHashMap<>();
-  
+
   /** Container for various labels used in the responses generated by this component */
   private static class SuggesterResultLabels {
     static final String SUGGEST = "suggest";
@@ -345,16 +348,18 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
   }
 
   @Override
-  public NamedList getStatistics() {
-    NamedList<String> stats = new SimpleOrderedMap<>();
-    stats.add("totalSizeInBytes", String.valueOf(ramBytesUsed()));
-    for (Map.Entry<String, SolrSuggester> entry : suggesters.entrySet()) {
-      SolrSuggester suggester = entry.getValue();
-      stats.add(entry.getKey(), suggester.toString());
-    }
-    return stats;
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    registry = manager.registry(registryName);
+    manager.registerGauge(this, registryName, () -> ramBytesUsed(), true, "totalSizeInBytes", getCategory().toString(), scope);
+    MetricsMap suggestersMap = new MetricsMap((detailed, map) -> {
+      for (Map.Entry<String, SolrSuggester> entry : suggesters.entrySet()) {
+        SolrSuggester suggester = entry.getValue();
+        map.put(entry.getKey(), suggester.toString());
+      }
+    });
+    manager.registerGauge(this, registryName, suggestersMap, true, "suggesters", getCategory().toString(), scope);
   }
-  
+
   @Override
   public long ramBytesUsed() {
     long sizeInBytes = 0;
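
The NamedList-based getStatistics() above gives way to gauges registered when metrics are initialized. A condensed sketch of that pattern (not the full component), using only the registerGauge() and MetricsMap calls visible in the hunk; "QUERY" stands in for getCategory().toString():

    // Register one scalar gauge and one map-valued gauge for a metric producer.
    public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
      registry = manager.registry(registryName);
      manager.registerGauge(this, registryName, () -> ramBytesUsed(), true,
          "totalSizeInBytes", "QUERY", scope);
      // MetricsMap re-fills the map each time the gauge is read.
      MetricsMap suggestersMap = new MetricsMap((detailed, map) -> {
        for (Map.Entry<String, SolrSuggester> entry : suggesters.entrySet()) {
          map.put(entry.getKey(), entry.getValue().toString());
        }
      });
      manager.registerGauge(this, registryName, suggestersMap, true,
          "suggesters", "QUERY", scope);
    }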

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
index b7f552b..db77101 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
@@ -51,6 +51,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 import org.apache.solr.client.solrj.io.stream.metrics.*;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.handler.StreamHandler;
 
 import java.io.IOException;
 import java.util.*;
@@ -158,6 +159,10 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
       throw new RuntimeException(e);
     }
 
+    StreamContext streamContext = new StreamContext();
+    streamContext.setSolrClientCache(StreamHandler.getClientCache());
+    tupleStream.setStreamContext(streamContext);
+
     final TupleStream finalStream = tupleStream;
 
     return new AbstractEnumerable<Object>() {
@@ -795,7 +800,6 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
                                   List<Pair<String, String>> metricPairs,
                                   List<Map.Entry<String, Class>> fields) {
 
-
     Map<String, Class> fmap = new HashMap();
     for(Map.Entry<String, Class> entry : fields) {
       fmap.put(entry.getKey(), entry.getValue());
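
The new lines near the top of this file give every stream opened by the SQL handler a StreamContext holding the shared SolrClientCache from StreamHandler. The updated tests later in this commit do the same wiring by hand; a client-side sketch of the pattern, with the collection name and expression as placeholders:

    SolrClientCache cache = new SolrClientCache();
    StreamContext context = new StreamContext();
    context.setSolrClientCache(cache);           // clients are pooled and reused

    TupleStream stream = new CloudSolrStream(
        StreamExpressionParser.parse("search(collection1, q=*:*, fl=\"id\", sort=\"id asc\")"),
        factory);                                // 'factory' is a pre-built StreamFactory
    stream.setStreamContext(context);            // must happen before open()
    try {
      stream.open();
      // ... read tuples ...
    } finally {
      stream.close();
      cache.close();
    }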

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java b/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
index 24304d0a..7e56ee4 100644
--- a/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
+++ b/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
@@ -128,40 +128,58 @@ public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInf
 
     // Load the fragmenters
     SolrFragmenter frag = solrCore.initPlugins(info.getChildren("fragmenter") , fragmenters,SolrFragmenter.class,null);
-    if (frag == null) frag = new GapFragmenter();
+    if (frag == null) {
+      frag = new GapFragmenter();
+      solrCore.initDefaultPlugin(frag, SolrFragmenter.class);
+    }
     fragmenters.put("", frag);
     fragmenters.put(null, frag);
 
     // Load the formatters
     SolrFormatter fmt = solrCore.initPlugins(info.getChildren("formatter"), formatters,SolrFormatter.class,null);
-    if (fmt == null) fmt = new HtmlFormatter();
+    if (fmt == null) {
+      fmt = new HtmlFormatter();
+      solrCore.initDefaultPlugin(fmt, SolrFormatter.class);
+    }
     formatters.put("", fmt);
     formatters.put(null, fmt);
 
     // Load the encoders
     SolrEncoder enc = solrCore.initPlugins(info.getChildren("encoder"), encoders,SolrEncoder.class,null);
-    if (enc == null) enc = new DefaultEncoder();
+    if (enc == null) {
+      enc = new DefaultEncoder();
+      solrCore.initDefaultPlugin(enc, SolrEncoder.class);
+    }
     encoders.put("", enc);
     encoders.put(null, enc);
 
     // Load the FragListBuilders
     SolrFragListBuilder fragListBuilder = solrCore.initPlugins(info.getChildren("fragListBuilder"),
         fragListBuilders, SolrFragListBuilder.class, null );
-    if( fragListBuilder == null ) fragListBuilder = new SimpleFragListBuilder();
+    if( fragListBuilder == null ) {
+      fragListBuilder = new SimpleFragListBuilder();
+      solrCore.initDefaultPlugin(fragListBuilder, SolrFragListBuilder.class);
+    }
     fragListBuilders.put( "", fragListBuilder );
     fragListBuilders.put( null, fragListBuilder );
 
     // Load the FragmentsBuilders
     SolrFragmentsBuilder fragsBuilder = solrCore.initPlugins(info.getChildren("fragmentsBuilder"),
         fragmentsBuilders, SolrFragmentsBuilder.class, null);
-    if( fragsBuilder == null ) fragsBuilder = new ScoreOrderFragmentsBuilder();
+    if( fragsBuilder == null ) {
+      fragsBuilder = new ScoreOrderFragmentsBuilder();
+      solrCore.initDefaultPlugin(fragsBuilder, SolrFragmentsBuilder.class);
+    }
     fragmentsBuilders.put( "", fragsBuilder );
     fragmentsBuilders.put( null, fragsBuilder );
 
     // Load the BoundaryScanners
     SolrBoundaryScanner boundaryScanner = solrCore.initPlugins(info.getChildren("boundaryScanner"),
         boundaryScanners, SolrBoundaryScanner.class, null);
-    if(boundaryScanner == null) boundaryScanner = new SimpleBoundaryScanner();
+    if(boundaryScanner == null) {
+      boundaryScanner = new SimpleBoundaryScanner();
+      solrCore.initDefaultPlugin(boundaryScanner, SolrBoundaryScanner.class);
+    }
     boundaryScanners.put("", boundaryScanner);
     boundaryScanners.put(null, boundaryScanner);
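
Each of the fall-back plugins above now also goes through solrCore.initDefaultPlugin(...), presumably so that a default instance participates in the same lifecycle (e.g. metrics registration) as one declared in solrconfig.xml. The repeated pattern, reduced to one commented instance:

    // Load configured fragmenters; fall back to the built-in default if none.
    SolrFragmenter frag = solrCore.initPlugins(info.getChildren("fragmenter"),
        fragmenters, SolrFragmenter.class, null);
    if (frag == null) {
      frag = new GapFragmenter();
      solrCore.initDefaultPlugin(frag, SolrFragmenter.class); // new in this commit
    }
    fragmenters.put("", frag);   // empty name and null both
    fragmenters.put(null, frag); // resolve to the default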
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/GapFragmenter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/GapFragmenter.java b/solr/core/src/java/org/apache/solr/highlight/GapFragmenter.java
index 64cb280..6a11bb9 100644
--- a/solr/core/src/java/org/apache/solr/highlight/GapFragmenter.java
+++ b/solr/core/src/java/org/apache/solr/highlight/GapFragmenter.java
@@ -30,7 +30,7 @@ public class GapFragmenter extends HighlightingPluginBase implements SolrFragmen
   @Override
   public Fragmenter getFragmenter(String fieldName, SolrParams params )
   {
-    numRequests++;
+    numRequests.inc();
     params = SolrParams.wrapDefaults(params, defaults);
     
     int fragsize = params.getFieldInt( fieldName, HighlightParams.FRAGSIZE, 100 );

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/HighlightingPluginBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/HighlightingPluginBase.java b/solr/core/src/java/org/apache/solr/highlight/HighlightingPluginBase.java
index f60ada8..7acaacd 100644
--- a/solr/core/src/java/org/apache/solr/highlight/HighlightingPluginBase.java
+++ b/solr/core/src/java/org/apache/solr/highlight/HighlightingPluginBase.java
@@ -16,21 +16,27 @@
  */
 package org.apache.solr.highlight;
 
-import java.net.URL;
+import java.util.HashSet;
+import java.util.Set;
 
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.MetricRegistry;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
 
 /**
  * 
  * @since solr 1.3
  */
-public abstract class HighlightingPluginBase implements SolrInfoMBean
+public abstract class HighlightingPluginBase implements SolrInfoBean, SolrMetricProducer
 {
-  protected long numRequests;
+  protected Counter numRequests;
   protected SolrParams defaults;
+  protected Set<String> metricNames = new HashSet<>(1);
+  protected MetricRegistry registry;
 
   public void init(NamedList args) {
     if( args != null ) {
@@ -50,14 +56,7 @@ public abstract class HighlightingPluginBase implements SolrInfoMBean
 
   @Override
   public abstract String getDescription();
-  @Override
-  public String getSource() { return null; }
-  
-  @Override
-  public String getVersion() {
-    return getClass().getPackage().getSpecificationVersion();
-  }
-  
+
   @Override
   public Category getCategory()
   {
@@ -65,15 +64,19 @@ public abstract class HighlightingPluginBase implements SolrInfoMBean
   }
 
   @Override
-  public URL[] getDocs() {
-    return null;  // this can be overridden, but not required
+  public Set<String> getMetricNames() {
+    return metricNames;
+  }
+
+  @Override
+  public MetricRegistry getMetricRegistry() {
+    return registry;
   }
 
   @Override
-  public NamedList getStatistics() {
-    NamedList<Long> lst = new SimpleOrderedMap<>();
-    lst.add("requests", numRequests);
-    return lst;
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    registry = manager.registry(registryName);
+    numRequests = manager.counter(this, registryName, "requests", getCategory().toString(), scope);
   }
 }
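
Subclasses keep using numRequests as before, but it is now a Dropwizard Counter that only exists once initializeMetrics() has run. A hypothetical subclass under that assumption (NullFragmenter is just a convenient no-op Fragmenter):

    public class ExampleFragmenter extends HighlightingPluginBase implements SolrFragmenter {
      @Override
      public Fragmenter getFragmenter(String fieldName, SolrParams params) {
        numRequests.inc();   // was: numRequests++
        params = SolrParams.wrapDefaults(params, defaults);
        return new org.apache.lucene.search.highlight.NullFragmenter();
      }

      @Override
      public String getDescription() {
        return "Example fragmenter";
      }
    }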
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/HtmlFormatter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/HtmlFormatter.java b/solr/core/src/java/org/apache/solr/highlight/HtmlFormatter.java
index 842d5cd..0950c53 100644
--- a/solr/core/src/java/org/apache/solr/highlight/HtmlFormatter.java
+++ b/solr/core/src/java/org/apache/solr/highlight/HtmlFormatter.java
@@ -29,7 +29,7 @@ public class HtmlFormatter extends HighlightingPluginBase implements SolrFormatt
   @Override
   public Formatter getFormatter(String fieldName, SolrParams params ) 
   {
-    numRequests++;
+    numRequests.inc();
     params = SolrParams.wrapDefaults(params, defaults);
 
     return new SimpleHTMLFormatter(

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/RegexFragmenter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/RegexFragmenter.java b/solr/core/src/java/org/apache/solr/highlight/RegexFragmenter.java
index b755b2d..ffefbad 100644
--- a/solr/core/src/java/org/apache/solr/highlight/RegexFragmenter.java
+++ b/solr/core/src/java/org/apache/solr/highlight/RegexFragmenter.java
@@ -60,7 +60,7 @@ public class RegexFragmenter extends HighlightingPluginBase implements SolrFragm
   @Override
   public Fragmenter getFragmenter(String fieldName, SolrParams params )
   { 
-    numRequests++;
+    numRequests.inc();
     params = SolrParams.wrapDefaults(params, defaults);
 
     int fragsize  = params.getFieldInt(   fieldName, HighlightParams.FRAGSIZE,  LuceneRegexFragmenter.DEFAULT_FRAGMENT_SIZE );

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/SimpleFragListBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/SimpleFragListBuilder.java b/solr/core/src/java/org/apache/solr/highlight/SimpleFragListBuilder.java
index ed5430c..7e30a92 100644
--- a/solr/core/src/java/org/apache/solr/highlight/SimpleFragListBuilder.java
+++ b/solr/core/src/java/org/apache/solr/highlight/SimpleFragListBuilder.java
@@ -28,7 +28,7 @@ public class SimpleFragListBuilder extends HighlightingPluginBase implements
     // If that ever changes, it should wrap them with defaults...
     // params = SolrParams.wrapDefaults(params, defaults)
 
-    numRequests++;
+    numRequests.inc();
 
     return new org.apache.lucene.search.vectorhighlight.SimpleFragListBuilder();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/SingleFragListBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/SingleFragListBuilder.java b/solr/core/src/java/org/apache/solr/highlight/SingleFragListBuilder.java
index 0b79929..0dfa16e 100644
--- a/solr/core/src/java/org/apache/solr/highlight/SingleFragListBuilder.java
+++ b/solr/core/src/java/org/apache/solr/highlight/SingleFragListBuilder.java
@@ -28,7 +28,7 @@ public class SingleFragListBuilder extends HighlightingPluginBase implements
     // If that ever changes, it should wrap them with defaults...
     // params = SolrParams.wrapDefaults(params, defaults)
 
-    numRequests++;
+    numRequests.inc();
 
     return new org.apache.lucene.search.vectorhighlight.SingleFragListBuilder();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/SolrBoundaryScanner.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/SolrBoundaryScanner.java b/solr/core/src/java/org/apache/solr/highlight/SolrBoundaryScanner.java
index 6f442f7..ddbbfde 100644
--- a/solr/core/src/java/org/apache/solr/highlight/SolrBoundaryScanner.java
+++ b/solr/core/src/java/org/apache/solr/highlight/SolrBoundaryScanner.java
@@ -18,14 +18,14 @@ package org.apache.solr.highlight;
 
 import org.apache.lucene.search.vectorhighlight.BoundaryScanner;
 import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 
 public abstract class SolrBoundaryScanner extends HighlightingPluginBase implements
-    SolrInfoMBean, NamedListInitializedPlugin {
+    SolrInfoBean, NamedListInitializedPlugin {
 
   public BoundaryScanner getBoundaryScanner(String fieldName, SolrParams params){
-    numRequests++;
+    numRequests.inc();
     params = SolrParams.wrapDefaults(params, defaults);
 
     return get(fieldName, params);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/SolrEncoder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/SolrEncoder.java b/solr/core/src/java/org/apache/solr/highlight/SolrEncoder.java
index 9f49228..7b78a06 100644
--- a/solr/core/src/java/org/apache/solr/highlight/SolrEncoder.java
+++ b/solr/core/src/java/org/apache/solr/highlight/SolrEncoder.java
@@ -19,10 +19,10 @@ package org.apache.solr.highlight;
 import org.apache.lucene.search.highlight.Encoder;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 
-public interface SolrEncoder extends SolrInfoMBean, NamedListInitializedPlugin {
+public interface SolrEncoder extends SolrInfoBean, NamedListInitializedPlugin {
 
   /** <code>init</code> will be called just once, immediately after creation.
    * <p>The args are user-level initialization parameters that

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/SolrFormatter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/SolrFormatter.java b/solr/core/src/java/org/apache/solr/highlight/SolrFormatter.java
index a8f51db..1a6443e 100644
--- a/solr/core/src/java/org/apache/solr/highlight/SolrFormatter.java
+++ b/solr/core/src/java/org/apache/solr/highlight/SolrFormatter.java
@@ -19,10 +19,10 @@ package org.apache.solr.highlight;
 import org.apache.lucene.search.highlight.Formatter;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 
-public interface SolrFormatter extends SolrInfoMBean, NamedListInitializedPlugin {
+public interface SolrFormatter extends SolrInfoBean, NamedListInitializedPlugin {
 
   /** <code>init</code> will be called just once, immediately after creation.
    * <p>The args are user-level initialization parameters that

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/SolrFragListBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/SolrFragListBuilder.java b/solr/core/src/java/org/apache/solr/highlight/SolrFragListBuilder.java
index f0c36b4..87da235 100644
--- a/solr/core/src/java/org/apache/solr/highlight/SolrFragListBuilder.java
+++ b/solr/core/src/java/org/apache/solr/highlight/SolrFragListBuilder.java
@@ -19,10 +19,10 @@ package org.apache.solr.highlight;
 import org.apache.lucene.search.vectorhighlight.FragListBuilder;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 
-public interface SolrFragListBuilder extends SolrInfoMBean, NamedListInitializedPlugin {
+public interface SolrFragListBuilder extends SolrInfoBean, NamedListInitializedPlugin {
 
   /** <code>init</code> will be called just once, immediately after creation.
    * <p>The args are user-level initialization parameters that

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/SolrFragmenter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/SolrFragmenter.java b/solr/core/src/java/org/apache/solr/highlight/SolrFragmenter.java
index 547506f..98c3056 100644
--- a/solr/core/src/java/org/apache/solr/highlight/SolrFragmenter.java
+++ b/solr/core/src/java/org/apache/solr/highlight/SolrFragmenter.java
@@ -19,10 +19,10 @@ package org.apache.solr.highlight;
 import org.apache.lucene.search.highlight.Fragmenter;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 
-public interface SolrFragmenter extends SolrInfoMBean, NamedListInitializedPlugin {
+public interface SolrFragmenter extends SolrInfoBean, NamedListInitializedPlugin {
 
   /** <code>init</code> will be called just once, immediately after creation.
    * <p>The args are user-level initialization parameters that

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/SolrFragmentsBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/SolrFragmentsBuilder.java b/solr/core/src/java/org/apache/solr/highlight/SolrFragmentsBuilder.java
index 78ea5a4..023d55a 100644
--- a/solr/core/src/java/org/apache/solr/highlight/SolrFragmentsBuilder.java
+++ b/solr/core/src/java/org/apache/solr/highlight/SolrFragmentsBuilder.java
@@ -21,11 +21,11 @@ import org.apache.lucene.search.vectorhighlight.FragmentsBuilder;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.HighlightParams;
 import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 
 public abstract class SolrFragmentsBuilder extends HighlightingPluginBase
-  implements SolrInfoMBean, NamedListInitializedPlugin {
+  implements SolrInfoBean, NamedListInitializedPlugin {
   
   public static final String DEFAULT_PRE_TAGS = "<em>";
   public static final String DEFAULT_POST_TAGS = "</em>";
@@ -37,7 +37,7 @@ public abstract class SolrFragmentsBuilder extends HighlightingPluginBase
    * @return An appropriate {@link org.apache.lucene.search.vectorhighlight.FragmentsBuilder}.
    */
   public FragmentsBuilder getFragmentsBuilder(SolrParams params, BoundaryScanner bs) {
-    numRequests++;
+    numRequests.inc();
     params = SolrParams.wrapDefaults(params, defaults);
 
     return getFragmentsBuilder( params, getPreTags( params, null ), getPostTags( params, null ), bs );

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/SolrHighlighter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/SolrHighlighter.java b/solr/core/src/java/org/apache/solr/highlight/SolrHighlighter.java
index e526c57..ade4c6b 100644
--- a/solr/core/src/java/org/apache/solr/highlight/SolrHighlighter.java
+++ b/solr/core/src/java/org/apache/solr/highlight/SolrHighlighter.java
@@ -24,9 +24,9 @@ import org.apache.solr.search.DocList;
 import org.apache.solr.util.SolrPluginUtils;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Collection;
-import java.util.List;
+import java.util.LinkedHashSet;
+import java.util.Set;
 
 public abstract class SolrHighlighter
 {
@@ -60,27 +60,19 @@ public abstract class SolrHighlighter
       if (emptyArray(defaultFields)) {
         String defaultSearchField = request.getSchema().getDefaultSearchFieldName();
         fields = null == defaultSearchField ? new String[]{} : new String[]{defaultSearchField};
-      }
-      else {
+      } else {
         fields = defaultFields;
       }
-    }
-    else if (fields.length == 1) {
-      if (fields[0].contains("*")) {
-        // create a Java regular expression from the wildcard string
-        String fieldRegex = fields[0].replaceAll("\\*", ".*");
-        Collection<String> storedHighlightFieldNames = request.getSearcher().getDocFetcher().getStoredHighlightFieldNames();
-        List<String> storedFieldsToHighlight = new ArrayList<>();
-        for (String storedFieldName: storedHighlightFieldNames) {
-          if (storedFieldName.matches(fieldRegex)) {
-            storedFieldsToHighlight.add(storedFieldName);
-          }
-        }
-        fields = storedFieldsToHighlight.toArray(new String[storedFieldsToHighlight.size()]);
-      } else {
-        // if there's a single request/handler value, it may be a space/comma separated list
-        fields = SolrPluginUtils.split(fields[0]);
+    } else {
+      Set<String> expandedFields = new LinkedHashSet<String>();
+      Collection<String> storedHighlightFieldNames = request.getSearcher().getDocFetcher().getStoredHighlightFieldNames();
+      for (String field : fields) {
+        expandWildcardsInHighlightFields(
+            expandedFields,
+            storedHighlightFieldNames,
+            SolrPluginUtils.split(field));
       }
+      fields = expandedFields.toArray(new String[]{});
     }
 
     // Trim them now in case they haven't been yet.  Not needed for all code-paths above but do it here.
@@ -94,6 +86,25 @@ public abstract class SolrHighlighter
     return (arr == null || arr.length == 0 || arr[0] == null || arr[0].trim().length() == 0);
   }
 
+  static private void expandWildcardsInHighlightFields (
+      Set<String> expandedFields,
+      Collection<String> storedHighlightFieldNames,
+      String... fields) {
+    for (String field : fields) {
+      if (field.contains("*")) {
+        // create a Java regular expression from the wildcard string
+        String fieldRegex = field.replaceAll("\\*", ".*");
+        for (String storedFieldName : storedHighlightFieldNames) {
+          if (storedFieldName.matches(fieldRegex)) {
+            expandedFields.add(storedFieldName);
+          }
+        }
+      } else {
+        expandedFields.add(field);
+      }
+    }
+  }
+
   /**
    * Generates a list of Highlighted query fragments for each item in a list
    * of documents, or returns null if highlighting is disabled.
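
Worked example of the new expansion: with stored highlight fields [title, title_s, body], the parameter hl.fl="title*, body" is split into {"title*", "body"}; "title*" becomes the regex "title.*" and matches title and title_s, so the LinkedHashSet yields [title, title_s, body], order preserved and duplicates dropped. The core of the conversion:

    String fieldRegex = "title*".replaceAll("\\*", ".*"); // -> "title.*"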

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/highlight/WeightedFragListBuilder.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/highlight/WeightedFragListBuilder.java b/solr/core/src/java/org/apache/solr/highlight/WeightedFragListBuilder.java
index f44c0f0..b97cc31 100644
--- a/solr/core/src/java/org/apache/solr/highlight/WeightedFragListBuilder.java
+++ b/solr/core/src/java/org/apache/solr/highlight/WeightedFragListBuilder.java
@@ -28,7 +28,7 @@ public class WeightedFragListBuilder extends HighlightingPluginBase implements
     // If that ever changes, it should wrap them with defaults...
     // params = SolrParams.wrapDefaults(params, defaults)
     
-    numRequests++;
+    numRequests.inc();
     
     return new org.apache.lucene.search.vectorhighlight.WeightedFragListBuilder();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/logging/MDCLoggingContext.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/logging/MDCLoggingContext.java b/solr/core/src/java/org/apache/solr/logging/MDCLoggingContext.java
index 5ff7742..11c8b84 100644
--- a/solr/core/src/java/org/apache/solr/logging/MDCLoggingContext.java
+++ b/solr/core/src/java/org/apache/solr/logging/MDCLoggingContext.java
@@ -34,7 +34,7 @@ import org.slf4j.MDC;
 /**
  * Sets per-thread context info for logging. Nested calls will use the top level parent for all context. The first
  * caller always owns the context until it calls {@link #clear()}. Always call {@link #setCore(SolrCore)} or
- * {@link #setCoreDescriptor(CoreDescriptor)} and then {@link #clear()} in a finally block.
+ * {@link #setCoreDescriptor(CoreContainer, CoreDescriptor)} and then {@link #clear()} in a finally block.
  */
 public class MDCLoggingContext {
   // When a thread sets context and finds that the context is already set, we should noop and ignore the finally clear
@@ -105,12 +105,11 @@ public class MDCLoggingContext {
   
   public static void setCore(SolrCore core) {
     if (core != null) {
-      CoreDescriptor cd = core.getCoreDescriptor();
-      setCoreDescriptor(cd);
+      setCoreDescriptor(core.getCoreContainer(), core.getCoreDescriptor());
     }
   }
   
-  public static void setCoreDescriptor(CoreDescriptor cd) {
+  public static void setCoreDescriptor(CoreContainer coreContainer, CoreDescriptor cd) {
     if (cd != null) {
       int callDepth = CALL_DEPTH.get();
       CALL_DEPTH.set(callDepth + 1);
@@ -119,9 +118,8 @@ public class MDCLoggingContext {
       }
       
       setCoreName(cd.getName());
-      CoreContainer cc = cd.getCoreContainer();
-      if (cc != null) {
-        ZkController zkController = cc.getZkController();
+      if (coreContainer != null) {
+        ZkController zkController = coreContainer.getZkController();
         if (zkController != null) {
           setNodeName(zkController.getNodeName());
         }
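
The descriptor no longer links back to its container, so callers now supply the CoreContainer explicitly. The contract stated in the javadoc above, as a usage sketch:

    // Set the MDC context, do the work, and always clear in a finally block.
    MDCLoggingContext.setCoreDescriptor(coreContainer, coreDescriptor); // or setCore(core)
    try {
      // ... log statements here carry the core/collection/node MDC values ...
    } finally {
      MDCLoggingContext.clear();
    }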

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/AltBufferPoolMetricSet.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/AltBufferPoolMetricSet.java b/solr/core/src/java/org/apache/solr/metrics/AltBufferPoolMetricSet.java
new file mode 100644
index 0000000..f9d3a43
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/AltBufferPoolMetricSet.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics;
+
+import java.lang.management.BufferPoolMXBean;
+import java.lang.management.ManagementFactory;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Metric;
+import com.codahale.metrics.MetricSet;
+
+/**
+ * This is an alternative implementation of {@link com.codahale.metrics.jvm.BufferPoolMetricSet} that
+ * doesn't need an MBean server.
+ */
+public class AltBufferPoolMetricSet implements MetricSet {
+
+  @Override
+  public Map<String, Metric> getMetrics() {
+    final Map<String, Metric> metrics = new HashMap<>();
+    List<BufferPoolMXBean> pools = ManagementFactory.getPlatformMXBeans(BufferPoolMXBean.class);
+    for (final BufferPoolMXBean pool : pools) {
+      String name = pool.getName();
+      metrics.put(name + ".Count", (Gauge<Long>)() -> pool.getCount());
+      metrics.put(name + ".MemoryUsed", (Gauge<Long>)() -> pool.getMemoryUsed());
+      metrics.put(name + ".TotalCapacity", (Gauge<Long>)() -> pool.getTotalCapacity());
+    }
+    return metrics;
+  }
+}
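
Because the gauges are built directly from ManagementFactory.getPlatformMXBeans(BufferPoolMXBean.class), the set can be registered like any other Dropwizard MetricSet. A usage sketch; on a typical JVM the pool names are "direct" and "mapped":

    MetricRegistry registry = new MetricRegistry();
    // Adds gauges such as "direct.Count", "direct.MemoryUsed", "mapped.TotalCapacity".
    registry.registerAll(new AltBufferPoolMetricSet());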


[04/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index f153a1b..ce0a4ce 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -127,63 +127,119 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     StreamExpression expression;
     CloudSolrStream stream;
     List<Tuple> tuples;
-    
-    // Basic test
-    expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
-    stream = new CloudSolrStream(expression, factory);
-    tuples = getTuples(stream);
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
 
-    assert(tuples.size() == 5);
-    assertOrder(tuples, 0, 2, 1, 3, 4);
-    assertLong(tuples.get(0), "a_i", 0);
+    try {
+      // Basic test
+      expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
+      stream = new CloudSolrStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
-    // Basic w/aliases
-    expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", aliases=\"a_i=alias.a_i, a_s=name\")");
-    stream = new CloudSolrStream(expression, factory);
-    tuples = getTuples(stream);
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 0, 2, 1, 3, 4);
+      assertLong(tuples.get(0), "a_i", 0);
 
-    assert(tuples.size() == 5);
-    assertOrder(tuples, 0, 2, 1, 3, 4);
-    assertLong(tuples.get(0), "alias.a_i", 0);
-    assertString(tuples.get(0), "name", "hello0");
+      // Basic w/aliases
+      expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", aliases=\"a_i=alias.a_i, a_s=name\")");
+      stream = new CloudSolrStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
-    // Basic filtered test
-    expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
-    stream = new CloudSolrStream(expression, factory);
-    tuples = getTuples(stream);
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 0, 2, 1, 3, 4);
+      assertLong(tuples.get(0), "alias.a_i", 0);
+      assertString(tuples.get(0), "name", "hello0");
 
-    assert(tuples.size() == 3);
-    assertOrder(tuples, 0, 3, 4);
-    assertLong(tuples.get(1), "a_i", 3);
-
-    try {
-      expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
+      // Basic filtered test
+      expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
       stream = new CloudSolrStream(expression, factory);
+      stream.setStreamContext(streamContext);
       tuples = getTuples(stream);
-      throw new Exception("Should be an exception here");
-    } catch(Exception e) {
-      assertTrue(e.getMessage().contains("q param expected for search function"));
-    }
 
-    try {
-      expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=\"blah\", sort=\"a_f asc, a_i asc\")");
+      assert (tuples.size() == 3);
+      assertOrder(tuples, 0, 3, 4);
+      assertLong(tuples.get(1), "a_i", 3);
+
+      try {
+        expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
+        stream = new CloudSolrStream(expression, factory);
+        stream.setStreamContext(streamContext);
+        tuples = getTuples(stream);
+        throw new Exception("Should be an exception here");
+      } catch (Exception e) {
+        assertTrue(e.getMessage().contains("q param expected for search function"));
+      }
+
+      try {
+        expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=\"blah\", sort=\"a_f asc, a_i asc\")");
+        stream = new CloudSolrStream(expression, factory);
+        stream.setStreamContext(streamContext);
+        tuples = getTuples(stream);
+        throw new Exception("Should be an exception here");
+      } catch (Exception e) {
+        assertTrue(e.getMessage().contains("fl param expected for search function"));
+      }
+
+      try {
+        expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=\"blah\", fl=\"id, a_f\", sort=\"a_f\")");
+        stream = new CloudSolrStream(expression, factory);
+        stream.setStreamContext(streamContext);
+        tuples = getTuples(stream);
+        throw new Exception("Should be an exception here");
+      } catch (Exception e) {
+        assertTrue(e.getMessage().contains("Invalid sort spec"));
+      }
+
+      // Test with shards param
+
+      List<String> shardUrls = TupleStream.getShards(cluster.getZkServer().getZkAddress(), COLLECTIONORALIAS, streamContext);
+
+      Map<String, List<String>> shardsMap = new HashMap<>();
+      shardsMap.put("myCollection", shardUrls);
+      StreamContext context = new StreamContext();
+      context.put("shards", shardsMap);
+      context.setSolrClientCache(solrClientCache);
+
+      // Basic test
+      expression = StreamExpressionParser.parse("search(myCollection, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
       stream = new CloudSolrStream(expression, factory);
+      stream.setStreamContext(context);
       tuples = getTuples(stream);
-      throw new Exception("Should be an exception here");
-    } catch(Exception e) {
-      assertTrue(e.getMessage().contains("fl param expected for search function"));
-    }
 
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 0, 2, 1, 3, 4);
+      assertLong(tuples.get(0), "a_i", 0);
 
-    try {
-      expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=\"blah\", fl=\"id, a_f\", sort=\"a_f\")");
-      stream = new CloudSolrStream(expression, factory);
+
+      //Exercise the /stream handler
+
+      //Add the shards http parameter for myCollection
+      StringBuilder buf = new StringBuilder();
+      for (String shardUrl : shardUrls) {
+        if (buf.length() > 0) {
+          buf.append(",");
+        }
+        buf.append(shardUrl);
+      }
+
+      ModifiableSolrParams solrParams = new ModifiableSolrParams();
+      solrParams.add("qt", "/stream");
+      solrParams.add("expr", "search(myCollection, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
+      solrParams.add("myCollection.shards", buf.toString());
+      SolrStream solrStream = new SolrStream(shardUrls.get(0), solrParams);
+      solrStream.setStreamContext(context);
+      tuples = getTuples(solrStream);
-      throw new Exception("Should be an exception here");
-    } catch(Exception e) {
-      assertTrue(e.getMessage().contains("Invalid sort spec"));
-    }
 
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 0, 2, 1, 3, 4);
+      assertLong(tuples.get(0), "a_i", 0);
+
+    } finally {
+      solrClientCache.close();
+    }
   }
 
   @Test
@@ -200,55 +256,66 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     StreamFactory factory = new StreamFactory();
     StreamExpression expression;
     CloudSolrStream stream;
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
     List<Tuple> tuples;
-    
-    // Basic test
-    expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", zkHost=" + cluster.getZkServer().getZkAddress() + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
-    stream = new CloudSolrStream(expression, factory);
-    tuples = getTuples(stream);
 
-    assert(tuples.size() == 5);
-    assertOrder(tuples, 0, 2, 1, 3, 4);
-    assertLong(tuples.get(0), "a_i", 0);
+    try {
+      // Basic test
+      expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", zkHost=" + cluster.getZkServer().getZkAddress() + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
+      stream = new CloudSolrStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
-    // Basic w/aliases
-    expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", aliases=\"a_i=alias.a_i, a_s=name\", zkHost=" + cluster.getZkServer().getZkAddress() + ")");
-    stream = new CloudSolrStream(expression, factory);
-    tuples = getTuples(stream);
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 0, 2, 1, 3, 4);
+      assertLong(tuples.get(0), "a_i", 0);
 
-    assert(tuples.size() == 5);
-    assertOrder(tuples, 0, 2, 1, 3, 4);
-    assertLong(tuples.get(0), "alias.a_i", 0);
-    assertString(tuples.get(0), "name", "hello0");
+      // Basic w/aliases
+      expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", aliases=\"a_i=alias.a_i, a_s=name\", zkHost=" + cluster.getZkServer().getZkAddress() + ")");
+      stream = new CloudSolrStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
-    // Basic filtered test
-    expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", zkHost="
-        + cluster.getZkServer().getZkAddress() + ", sort=\"a_f asc, a_i asc\")");
-    stream = new CloudSolrStream(expression, factory);
-    tuples = getTuples(stream);
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 0, 2, 1, 3, 4);
+      assertLong(tuples.get(0), "alias.a_i", 0);
+      assertString(tuples.get(0), "name", "hello0");
 
-    assert(tuples.size() == 3);
-    assertOrder(tuples, 0, 3, 4);
-    assertLong(tuples.get(1), "a_i", 3);
+      // Basic filtered test
+      expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", zkHost="
+          + cluster.getZkServer().getZkAddress() + ", sort=\"a_f asc, a_i asc\")");
+      stream = new CloudSolrStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
+      assert (tuples.size() == 3);
+      assertOrder(tuples, 0, 3, 4);
+      assertLong(tuples.get(1), "a_i", 3);
 
-    // Test a couple of multile field lists.
-    expression = StreamExpressionParser.parse("search(collection1, fq=\"a_s:hello0\", fq=\"a_s:hello1\", q=\"id:(*)\", " +
-        "zkHost=" + cluster.getZkServer().getZkAddress()+ ", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
-    stream = new CloudSolrStream(expression, factory);
-    tuples = getTuples(stream);
 
-    assertEquals("fq clauses should have prevented any docs from coming back", tuples.size(), 0);
+      // Test a couple of multiple field lists.
+      expression = StreamExpressionParser.parse("search(collection1, fq=\"a_s:hello0\", fq=\"a_s:hello1\", q=\"id:(*)\", " +
+          "zkHost=" + cluster.getZkServer().getZkAddress() + ", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
+      stream = new CloudSolrStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
+      assertEquals("fq clauses should have prevented any docs from coming back", tuples.size(), 0);
 
-    expression = StreamExpressionParser.parse("search(collection1, fq=\"a_s:(hello0 OR hello1)\", q=\"id:(*)\", " +
-        "zkHost=" + cluster.getZkServer().getZkAddress() + ", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
-    stream = new CloudSolrStream(expression, factory);
-    tuples = getTuples(stream);
 
-    assertEquals("Combining an f1 clause should show us 2 docs", tuples.size(), 2);
-    
-        
+      expression = StreamExpressionParser.parse("search(collection1, fq=\"a_s:(hello0 OR hello1)\", q=\"id:(*)\", " +
+          "zkHost=" + cluster.getZkServer().getZkAddress() + ", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")");
+      stream = new CloudSolrStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assertEquals("Combining an f1 clause should show us 2 docs", tuples.size(), 2);
+
+    } finally {
+      solrClientCache.close();
+    }
 
   }
 
@@ -315,43 +382,53 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     StreamExpression expression;
     TupleStream stream;
     List<Tuple> tuples;
-    
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
     StreamFactory factory = new StreamFactory()
       .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress())
       .withFunctionName("search", CloudSolrStream.class)
       .withFunctionName("unique", UniqueStream.class);
-    
-    // Basic test
-    expression = StreamExpressionParser.parse("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f\")");
-    stream = new UniqueStream(expression, factory);
-    tuples = getTuples(stream);
-    
-    assert(tuples.size() == 4);
-    assertOrder(tuples, 0, 1, 3, 4);
 
-    // Basic test desc
-    expression = StreamExpressionParser.parse("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc, a_i desc\"), over=\"a_f\")");
-    stream = new UniqueStream(expression, factory);
-    tuples = getTuples(stream);
-    
-    assert(tuples.size() == 4);
-    assertOrder(tuples, 4, 3, 1, 2);
-    
-    // Basic w/multi comp
-    expression = StreamExpressionParser.parse("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f, a_i\")");
-    stream = new UniqueStream(expression, factory);
-    tuples = getTuples(stream);
-    
-    assert(tuples.size() == 5);
-    assertOrder(tuples, 0,2,1,3,4);
-    
-    // full factory w/multi comp
-    stream = factory.constructStream("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f, a_i\")");
-    tuples = getTuples(stream);
-    
-    assert(tuples.size() == 5);
-    assertOrder(tuples, 0, 2, 1, 3, 4);
+    try {
+      // Basic test
+      expression = StreamExpressionParser.parse("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f\")");
+      stream = new UniqueStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assert (tuples.size() == 4);
+      assertOrder(tuples, 0, 1, 3, 4);
+
+      // Basic test desc
+      expression = StreamExpressionParser.parse("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc, a_i desc\"), over=\"a_f\")");
+      stream = new UniqueStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assert (tuples.size() == 4);
+      assertOrder(tuples, 4, 3, 1, 2);
 
+      // Basic w/multi comp
+      expression = StreamExpressionParser.parse("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f, a_i\")");
+      stream = new UniqueStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 0, 2, 1, 3, 4);
+
+      // full factory w/multi comp
+      stream = factory.constructStream("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f, a_i\")");
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 0, 2, 1, 3, 4);
+    } finally {
+      solrClientCache.close();
+    }
   }
 
   @Test
@@ -369,30 +446,38 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     StreamExpression expression;
     TupleStream stream;
     List<Tuple> tuples;
-    
-    StreamFactory factory = new StreamFactory()
-      .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress())
-      .withFunctionName("search", CloudSolrStream.class)
-      .withFunctionName("sort", SortStream.class);
-    
-    // Basic test
-    stream = factory.constructStream("sort(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"), by=\"a_i asc\")");
-    tuples = getTuples(stream);
-    assert(tuples.size() == 6);
-    assertOrder(tuples, 0, 1, 5, 2, 3, 4);
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+    try {
+      StreamFactory factory = new StreamFactory()
+          .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress())
+          .withFunctionName("search", CloudSolrStream.class)
+          .withFunctionName("sort", SortStream.class);
 
-    // Basic test desc
-    stream = factory.constructStream("sort(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"), by=\"a_i desc\")");
-    tuples = getTuples(stream);
-    assert(tuples.size() == 6);
-    assertOrder(tuples, 4,3,2,1,5,0);
-    
-    // Basic w/multi comp
-    stream = factory.constructStream("sort(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"), by=\"a_i asc, a_f desc\")");
-    tuples = getTuples(stream);
-    assert(tuples.size() == 6);
-    assertOrder(tuples, 0,5,1,2,3,4);
+      // Basic test
+      stream = factory.constructStream("sort(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"), by=\"a_i asc\")");
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+      assert (tuples.size() == 6);
+      assertOrder(tuples, 0, 1, 5, 2, 3, 4);
 
+      // Basic test desc
+      stream = factory.constructStream("sort(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"), by=\"a_i desc\")");
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+      assert (tuples.size() == 6);
+      assertOrder(tuples, 4, 3, 2, 1, 5, 0);
+
+      // Basic w/multi comp
+      stream = factory.constructStream("sort(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"), by=\"a_i asc, a_f desc\")");
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+      assert (tuples.size() == 6);
+      assertOrder(tuples, 0, 5, 1, 2, 3, 4);
+    } finally {
+      solrClientCache.close();
+    }
   }
 
 
@@ -411,17 +496,24 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     StreamExpression expression;
     TupleStream stream;
     List<Tuple> tuples;
-
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
     StreamFactory factory = new StreamFactory()
         .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress())
         .withFunctionName("search", CloudSolrStream.class)
         .withFunctionName("null", NullStream.class);
 
-    // Basic test
-    stream = factory.constructStream("null(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"), by=\"a_i asc\")");
-    tuples = getTuples(stream);
-    assertTrue(tuples.size() == 1);
-    assertTrue(tuples.get(0).getLong("nullCount") == 6);
+    try {
+      // Basic test
+      stream = factory.constructStream("null(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"), by=\"a_i asc\")");
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+      assertTrue(tuples.size() == 1);
+      assertTrue(tuples.get(0).getLong("nullCount") == 6);
+    } finally {
+      solrClientCache.close();
+    }
   }
 
 
@@ -440,6 +532,9 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     StreamExpression expression;
     TupleStream stream;
     List<Tuple> tuples;
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
 
     StreamFactory factory = new StreamFactory()
         .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress())
@@ -447,24 +542,29 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         .withFunctionName("null", NullStream.class)
         .withFunctionName("parallel", ParallelStream.class);
 
-    // Basic test
-    stream = factory.constructStream("parallel(" + COLLECTIONORALIAS + ", workers=2, sort=\"nullCount desc\", null(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\", partitionKeys=id), by=\"a_i asc\"))");
-    tuples = getTuples(stream);
-    assertTrue(tuples.size() == 2);
-    long nullCount = 0;
-    for(Tuple t : tuples) {
-      nullCount += t.getLong("nullCount");
-    }
+    try {
 
-    assertEquals(nullCount, 6L);
-  }
+      // Basic test
+      stream = factory.constructStream("parallel(" + COLLECTIONORALIAS + ", workers=2, sort=\"nullCount desc\", null(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\", partitionKeys=id), by=\"a_i asc\"))");
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+      assertTrue(tuples.size() == 2);
+      long nullCount = 0;
+      for (Tuple t : tuples) {
+        nullCount += t.getLong("nullCount");
+      }
 
+      assertEquals(6L, nullCount);
+    } finally {
+      solrClientCache.close();
+    }
+  }
 
   @Test
   public void testNulls() throws Exception {
 
     new UpdateRequest()
-        .add(id, "0",                  "a_i", "1", "a_f", "0", "s_multi", "aaa", "s_multi", "bbb", "i_multi", "100", "i_multi", "200")
+        .add(id, "0", "a_i", "1", "a_f", "0", "s_multi", "aaa", "s_multi", "bbb", "i_multi", "100", "i_multi", "200")
         .add(id, "2", "a_s", "hello2", "a_i", "3", "a_f", "0")
         .add(id, "3", "a_s", "hello3", "a_i", "4", "a_f", "3")
         .add(id, "4", "a_s", "hello4",             "a_f", "4")
@@ -475,49 +575,59 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     TupleStream stream;
     List<Tuple> tuples;
     Tuple tuple;
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
     StreamFactory factory = new StreamFactory()
         .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress())
         .withFunctionName("search", CloudSolrStream.class);
-    // Basic test
-    expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f, s_multi, i_multi\", qt=\"/export\", sort=\"a_i asc\")");
-    stream = new CloudSolrStream(expression, factory);
-    tuples = getTuples(stream);
+    try {
+      // Basic test
+      expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f, s_multi, i_multi\", qt=\"/export\", sort=\"a_i asc\")");
+      stream = new CloudSolrStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
-    assert(tuples.size() == 5);
-    assertOrder(tuples, 4, 0, 1, 2, 3);
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 4, 0, 1, 2, 3);
 
-    tuple = tuples.get(0);
-    assertTrue("hello4".equals(tuple.getString("a_s")));
-    assertNull(tuple.get("s_multi"));
-    assertNull(tuple.get("i_multi"));
-    assertNull(tuple.getLong("a_i"));
+      tuple = tuples.get(0);
+      assertTrue("hello4".equals(tuple.getString("a_s")));
+      assertNull(tuple.get("s_multi"));
+      assertNull(tuple.get("i_multi"));
+      assertNull(tuple.getLong("a_i"));
 
 
-    tuple = tuples.get(1);
-    assertNull(tuple.get("a_s"));
-    List<String> strings = tuple.getStrings("s_multi");
-    assertNotNull(strings);
-    assertEquals("aaa", strings.get(0));
-    assertEquals("bbb", strings.get(1));
-    List<Long> longs = tuple.getLongs("i_multi");
-    assertNotNull(longs);
-
-    //test sort (asc) with null string field. Null should sort to the top.
-    expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f, s_multi, i_multi\", qt=\"/export\", sort=\"a_s asc\")");
-    stream = new CloudSolrStream(expression, factory);
-    tuples = getTuples(stream);
+      tuple = tuples.get(1);
+      assertNull(tuple.get("a_s"));
+      List<String> strings = tuple.getStrings("s_multi");
+      assertNotNull(strings);
+      assertEquals("aaa", strings.get(0));
+      assertEquals("bbb", strings.get(1));
+      List<Long> longs = tuple.getLongs("i_multi");
+      assertNotNull(longs);
 
-    assert(tuples.size() == 5);
-    assertOrder(tuples, 0, 1, 2, 3, 4);
+      //test sort (asc) with null string field. Null should sort to the top.
+      expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f, s_multi, i_multi\", qt=\"/export\", sort=\"a_s asc\")");
+      stream = new CloudSolrStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
-    //test sort(desc) with null string field.  Null should sort to the bottom.
-    expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f, s_multi, i_multi\", qt=\"/export\", sort=\"a_s desc\")");
-    stream = new CloudSolrStream(expression, factory);
-    tuples = getTuples(stream);
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 0, 1, 2, 3, 4);
 
-    assert(tuples.size() == 5);
-    assertOrder(tuples, 4, 3, 2, 1, 0);
+      //test sort(desc) with null string field.  Null should sort to the bottom.
+      expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f, s_multi, i_multi\", qt=\"/export\", sort=\"a_s desc\")");
+      stream = new CloudSolrStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 4, 3, 2, 1, 0);
+    } finally {
+      solrClientCache.close();
+    }
   }
 
   @Test
@@ -546,55 +656,67 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         + "search(" + COLLECTIONORALIAS + ", q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"),"
         + "search(" + COLLECTIONORALIAS + ", q=\"id:(1)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"),"
         + "on=\"a_f asc\")");
-    stream = new MergeStream(expression, factory);
-    tuples = getTuples(stream);
-    
-    assert(tuples.size() == 4);
-    assertOrder(tuples, 0, 1, 3, 4);
 
-    // Basic test desc
-    expression = StreamExpressionParser.parse("merge("
-        + "search(" + COLLECTIONORALIAS + ", q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc\"),"
-        + "search(" + COLLECTIONORALIAS + ", q=\"id:(1)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc\"),"
-        + "on=\"a_f desc\")");
-    stream = new MergeStream(expression, factory);
-    tuples = getTuples(stream);
-    
-    assert(tuples.size() == 4);
-    assertOrder(tuples, 4, 3, 1, 0);
-    
-    // Basic w/multi comp
-    expression = StreamExpressionParser.parse("merge("
-        + "search(" + COLLECTIONORALIAS + ", q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-        + "search(" + COLLECTIONORALIAS + ", q=\"id:(1 2)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-        + "on=\"a_f asc, a_s asc\")");
     stream = new MergeStream(expression, factory);
-    tuples = getTuples(stream);
-    
-    assert(tuples.size() == 5);
-    assertOrder(tuples, 0, 2, 1, 3, 4);
-    
-    // full factory w/multi comp
-    stream = factory.constructStream("merge("
-        + "search(" + COLLECTIONORALIAS + ", q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-        + "search(" + COLLECTIONORALIAS + ", q=\"id:(1 2)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-        + "on=\"a_f asc, a_s asc\")");
-    tuples = getTuples(stream);
-    
-    assert(tuples.size() == 5);
-    assertOrder(tuples, 0, 2, 1, 3, 4);
-    
-    // full factory w/multi streams
-    stream = factory.constructStream("merge("
-        + "search(" + COLLECTIONORALIAS + ", q=\"id:(0 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-        + "search(" + COLLECTIONORALIAS + ", q=\"id:(1)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-        + "search(" + COLLECTIONORALIAS + ", q=\"id:(2)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
-        + "on=\"a_f asc\")");
-    tuples = getTuples(stream);
-    
-    assert(tuples.size() == 4);
-    assertOrder(tuples, 0, 2, 1, 4);
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+    try {
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assert (tuples.size() == 4);
+      assertOrder(tuples, 0, 1, 3, 4);
+
+      // Basic test desc
+      expression = StreamExpressionParser.parse("merge("
+          + "search(" + COLLECTIONORALIAS + ", q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc\"),"
+          + "search(" + COLLECTIONORALIAS + ", q=\"id:(1)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc\"),"
+          + "on=\"a_f desc\")");
+      stream = new MergeStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assert (tuples.size() == 4);
+      assertOrder(tuples, 4, 3, 1, 0);
+
+      // Basic w/multi comp
+      expression = StreamExpressionParser.parse("merge("
+          + "search(" + COLLECTIONORALIAS + ", q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
+          + "search(" + COLLECTIONORALIAS + ", q=\"id:(1 2)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
+          + "on=\"a_f asc, a_s asc\")");
+      stream = new MergeStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 0, 2, 1, 3, 4);
+
+      // full factory w/multi comp
+      stream = factory.constructStream("merge("
+          + "search(" + COLLECTIONORALIAS + ", q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
+          + "search(" + COLLECTIONORALIAS + ", q=\"id:(1 2)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
+          + "on=\"a_f asc, a_s asc\")");
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 0, 2, 1, 3, 4);
+
+      // full factory w/multi streams
+      stream = factory.constructStream("merge("
+          + "search(" + COLLECTIONORALIAS + ", q=\"id:(0 4)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
+          + "search(" + COLLECTIONORALIAS + ", q=\"id:(1)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
+          + "search(" + COLLECTIONORALIAS + ", q=\"id:(2)\", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_s asc\"),"
+          + "on=\"a_f asc\")");
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
+      assert (tuples.size() == 4);
+      assertOrder(tuples, 0, 2, 1, 4);
+    } finally {
+      solrClientCache.close();
+    }
   }
 
   @Test
@@ -611,61 +733,70 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     StreamExpression expression;
     TupleStream stream;
     List<Tuple> tuples;
-    
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
     StreamFactory factory = new StreamFactory()
       .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress())
       .withFunctionName("search", CloudSolrStream.class)
       .withFunctionName("unique", UniqueStream.class)
       .withFunctionName("top", RankStream.class);
-    
-    // Basic test
-    expression = StreamExpressionParser.parse("top("
-        + "n=3,"
-        + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"),"
-        + "sort=\"a_f asc, a_i asc\")");
-    stream = new RankStream(expression, factory);
-    tuples = getTuples(stream);
-    
-    assert(tuples.size() == 3);
-    assertOrder(tuples, 0, 2, 1);
+    try {
+      // Basic test
+      expression = StreamExpressionParser.parse("top("
+          + "n=3,"
+          + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"),"
+          + "sort=\"a_f asc, a_i asc\")");
+      stream = new RankStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
-    // Basic test desc
-    expression = StreamExpressionParser.parse("top("
-        + "n=2,"
-        + "unique("
-        + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc\"),"
-        + "over=\"a_f\"),"
-        + "sort=\"a_f desc\")");
-    stream = new RankStream(expression, factory);
-    tuples = getTuples(stream);
-    
-    assert(tuples.size() == 2);
-    assertOrder(tuples, 4, 3);
-    
-    // full factory
-    stream = factory.constructStream("top("
-        + "n=4,"
-        + "unique("
-        + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"),"
-        + "over=\"a_f\"),"
-        + "sort=\"a_f asc\")");
-    tuples = getTuples(stream);
-    
-    assert(tuples.size() == 4);
-    assertOrder(tuples, 0,1,3,4);
+      assert (tuples.size() == 3);
+      assertOrder(tuples, 0, 2, 1);
+
+      // Basic test desc
+      expression = StreamExpressionParser.parse("top("
+          + "n=2,"
+          + "unique("
+          + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc\"),"
+          + "over=\"a_f\"),"
+          + "sort=\"a_f desc\")");
+      stream = new RankStream(expression, factory);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
-    // full factory, switch order
-    stream = factory.constructStream("top("
-            + "n=4,"
-            + "unique("
-            +   "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc, a_i desc\"),"
-            +   "over=\"a_f\"),"
-            + "sort=\"a_f asc\")");
-    tuples = getTuples(stream);
-    
-    assert(tuples.size() == 4);
-    assertOrder(tuples, 2,1,3,4);
+      assert (tuples.size() == 2);
+      assertOrder(tuples, 4, 3);
+
+      // full factory
+      stream = factory.constructStream("top("
+          + "n=4,"
+          + "unique("
+          + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"),"
+          + "over=\"a_f\"),"
+          + "sort=\"a_f asc\")");
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assert (tuples.size() == 4);
+      assertOrder(tuples, 0, 1, 3, 4);
 
+      // full factory, switch order
+      stream = factory.constructStream("top("
+          + "n=4,"
+          + "unique("
+          + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc, a_i desc\"),"
+          + "over=\"a_f\"),"
+          + "sort=\"a_f asc\")");
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
+
+      assert (tuples.size() == 4);
+      assertOrder(tuples, 2, 1, 3, 4);
+    } finally {
+      solrClientCache.close();
+    }
   }
 
   @Test
@@ -735,7 +866,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
 
       //Exercise the /stream handler
       ModifiableSolrParams sParams = new ModifiableSolrParams(StreamingTest.mapParams(CommonParams.QT, "/stream"));
-      sParams.add("expr", "random(" + COLLECTIONORALIAS + ", q=\"*:*\", rows=\"1\", fl=\"id, a_i\")" );
+      sParams.add("expr", "random(" + COLLECTIONORALIAS + ", q=\"*:*\", rows=\"1\", fl=\"id, a_i\")");
       JettySolrRunner jetty = cluster.getJettySolrRunner(0);
       SolrStream solrStream = new SolrStream(jetty.getBaseUrl().toString() + "/collection1", sParams);
       List<Tuple> tuples4 = getTuples(solrStream);
@@ -767,61 +898,69 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     List<Tuple> tuples;
     Tuple t0, t1, t2;
     List<Map> maps0, maps1, maps2;
-    
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
     StreamFactory factory = new StreamFactory()
         .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress())
         .withFunctionName("search", CloudSolrStream.class)
         .withFunctionName("reduce", ReducerStream.class)
         .withFunctionName("group", GroupOperation.class);
 
-    // basic
-    expression = StreamExpressionParser.parse("reduce("
-        + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_s asc, a_f asc\"),"
-        + "by=\"a_s\","
-        + "group(sort=\"a_f desc\", n=\"4\"))");
+    try {
+      // basic
+      expression = StreamExpressionParser.parse("reduce("
+          + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_s asc, a_f asc\"),"
+          + "by=\"a_s\","
+          + "group(sort=\"a_f desc\", n=\"4\"))");
 
-    stream = factory.constructStream(expression);
-    tuples = getTuples(stream);
+      stream = factory.constructStream(expression);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
-    assert(tuples.size() == 3);
+      assert (tuples.size() == 3);
 
-    t0 = tuples.get(0);
-    maps0 = t0.getMaps("group");
-    assertMaps(maps0, 9, 1, 2, 0);
+      t0 = tuples.get(0);
+      maps0 = t0.getMaps("group");
+      assertMaps(maps0, 9, 1, 2, 0);
 
-    t1 = tuples.get(1);
-    maps1 = t1.getMaps("group");
-    assertMaps(maps1, 8, 7, 5, 3);
+      t1 = tuples.get(1);
+      maps1 = t1.getMaps("group");
+      assertMaps(maps1, 8, 7, 5, 3);
 
 
-    t2 = tuples.get(2);
-    maps2 = t2.getMaps("group");
-    assertMaps(maps2, 6, 4);
-    
-    // basic w/spaces
-    expression = StreamExpressionParser.parse("reduce("
-        + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_s asc, a_f       asc\"),"
-        + "by=\"a_s\"," +
-        "group(sort=\"a_i asc\", n=\"2\"))");
-    stream = factory.constructStream(expression);
-    tuples = getTuples(stream);
+      t2 = tuples.get(2);
+      maps2 = t2.getMaps("group");
+      assertMaps(maps2, 6, 4);
 
-    assert(tuples.size() == 3);
+      // basic w/spaces
+      expression = StreamExpressionParser.parse("reduce("
+          + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_s asc, a_f       asc\"),"
+          + "by=\"a_s\"," +
+          "group(sort=\"a_i asc\", n=\"2\"))");
+      stream = factory.constructStream(expression);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
-    t0 = tuples.get(0);
-    maps0 = t0.getMaps("group");
-    assert(maps0.size() == 2);
+      assert (tuples.size() == 3);
 
-    assertMaps(maps0, 0, 1);
+      t0 = tuples.get(0);
+      maps0 = t0.getMaps("group");
+      assert (maps0.size() == 2);
 
-    t1 = tuples.get(1);
-    maps1 = t1.getMaps("group");
-    assertMaps(maps1, 3, 5);
+      assertMaps(maps0, 0, 1);
 
-    t2 = tuples.get(2);
-    maps2 = t2.getMaps("group");
-    assertMaps(maps2, 4, 6);
+      t1 = tuples.get(1);
+      maps1 = t1.getMaps("group");
+      assertMaps(maps1, 3, 5);
 
+      t2 = tuples.get(2);
+      maps2 = t2.getMaps("group");
+      assertMaps(maps2, 4, 6);
+    } finally {
+      solrClientCache.close();
+    }
   }
 
 
@@ -1044,7 +1183,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
   @Test
   public void testFetchStream() throws Exception {
 
-    SolrClientCache solrClientCache = new SolrClientCache();
+    SolrClientCache solrClientCache = new SolrClientCache(); //TODO share in @Before; close in @After?
 
     new UpdateRequest()
         .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1", "subject", "blah blah blah 0")
@@ -1096,7 +1235,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue("blah blah blah 9".equals(t.getString("subject")));
 
     //Change the batch size
-    stream = factory.constructStream("fetch("+ COLLECTIONORALIAS +",  search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"), on=\"id=a_i\", batchSize=\"3\", fl=\"subject\")");
+    stream = factory.constructStream("fetch(" + COLLECTIONORALIAS + ",  search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"), on=\"id=a_i\", batchSize=\"3\", fl=\"subject\")");
     context = new StreamContext();
     context.setSolrClientCache(solrClientCache);
     stream.setStreamContext(context);
@@ -1123,6 +1262,22 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue("blah blah blah 8".equals(t.getString("subject")));
     t = tuples.get(9);
     assertTrue("blah blah blah 9".equals(t.getString("subject")));
+
+    // SOLR-10404 test that "hello 99" as a value gets escaped
+    new UpdateRequest()
+        .add(id, "99", "a1_s", "hello 99", "a2_s", "hello 99", "subject", "blah blah blah 99")
+        .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
+
+    stream = factory.constructStream("fetch("+ COLLECTIONORALIAS +",  search(" + COLLECTIONORALIAS + ", q=" + id + ":99, fl=\"id,a1_s\", sort=\"id asc\"), on=\"a1_s=a2_s\", fl=\"subject\")");
+    context = new StreamContext();
+    context.setSolrClientCache(solrClientCache);
+    stream.setStreamContext(context);
+    tuples = getTuples(stream);
+
+    assertEquals(1, tuples.size());
+    t = tuples.get(0);
+    assertTrue("blah blah blah 99".equals(t.getString("subject")));
+
     solrClientCache.close();
   }
 
@@ -1142,6 +1297,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         .add(id, "9", "a_s", "hello0", "a_i", "9", "a_f", "10", "subject", "blah blah blah 9")
         .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
     TupleStream stream;
     List<Tuple> tuples;
 
@@ -1151,57 +1310,63 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         .withFunctionName("parallel", ParallelStream.class)
         .withFunctionName("fetch", FetchStream.class);
 
-    stream = factory.constructStream("parallel(" + COLLECTIONORALIAS + ", workers=2, sort=\"a_f asc\", fetch(" + COLLECTIONORALIAS + ",  search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\", partitionKeys=\"id\"), on=\"id=a_i\", batchSize=\"2\", fl=\"subject\"))");
-    tuples = getTuples(stream);
-
-    assert(tuples.size() == 10);
-    Tuple t = tuples.get(0);
-    assertTrue("blah blah blah 0".equals(t.getString("subject")));
-    t = tuples.get(1);
-    assertTrue("blah blah blah 2".equals(t.getString("subject")));
-    t = tuples.get(2);
-    assertTrue("blah blah blah 3".equals(t.getString("subject")));
-    t = tuples.get(3);
-    assertTrue("blah blah blah 4".equals(t.getString("subject")));
-    t = tuples.get(4);
-    assertTrue("blah blah blah 1".equals(t.getString("subject")));
-    t = tuples.get(5);
-    assertTrue("blah blah blah 5".equals(t.getString("subject")));
-    t = tuples.get(6);
-    assertTrue("blah blah blah 6".equals(t.getString("subject")));
-    t = tuples.get(7);
-    assertTrue("blah blah blah 7".equals(t.getString("subject")));
-    t = tuples.get(8);
-    assertTrue("blah blah blah 8".equals(t.getString("subject")));
-    t = tuples.get(9);
-    assertTrue("blah blah blah 9".equals(t.getString("subject")));
-
+    try {
 
-    stream = factory.constructStream("parallel(" + COLLECTIONORALIAS + ", workers=2, sort=\"a_f asc\", fetch(" + COLLECTIONORALIAS + ",  search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\", partitionKeys=\"id\"), on=\"id=a_i\", batchSize=\"3\", fl=\"subject\"))");
-    tuples = getTuples(stream);
+      stream = factory.constructStream("parallel(" + COLLECTIONORALIAS + ", workers=2, sort=\"a_f asc\", fetch(" + COLLECTIONORALIAS + ",  search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\", partitionKeys=\"id\"), on=\"id=a_i\", batchSize=\"2\", fl=\"subject\"))");
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
-    assert(tuples.size() == 10);
-    t = tuples.get(0);
-    assertTrue("blah blah blah 0".equals(t.getString("subject")));
-    t = tuples.get(1);
-    assertTrue("blah blah blah 2".equals(t.getString("subject")));
-    t = tuples.get(2);
-    assertTrue("blah blah blah 3".equals(t.getString("subject")));
-    t = tuples.get(3);
-    assertTrue("blah blah blah 4".equals(t.getString("subject")));
-    t = tuples.get(4);
-    assertTrue("blah blah blah 1".equals(t.getString("subject")));
-    t = tuples.get(5);
-    assertTrue("blah blah blah 5".equals(t.getString("subject")));
-    t = tuples.get(6);
-    assertTrue("blah blah blah 6".equals(t.getString("subject")));
-    t = tuples.get(7);
-    assertTrue("blah blah blah 7".equals(t.getString("subject")));
-    t = tuples.get(8);
-    assertTrue("blah blah blah 8".equals(t.getString("subject")));
-    t = tuples.get(9);
-    assertTrue("blah blah blah 9".equals(t.getString("subject")));
+      assert (tuples.size() == 10);
+      Tuple t = tuples.get(0);
+      assertTrue("blah blah blah 0".equals(t.getString("subject")));
+      t = tuples.get(1);
+      assertTrue("blah blah blah 2".equals(t.getString("subject")));
+      t = tuples.get(2);
+      assertTrue("blah blah blah 3".equals(t.getString("subject")));
+      t = tuples.get(3);
+      assertTrue("blah blah blah 4".equals(t.getString("subject")));
+      t = tuples.get(4);
+      assertTrue("blah blah blah 1".equals(t.getString("subject")));
+      t = tuples.get(5);
+      assertTrue("blah blah blah 5".equals(t.getString("subject")));
+      t = tuples.get(6);
+      assertTrue("blah blah blah 6".equals(t.getString("subject")));
+      t = tuples.get(7);
+      assertTrue("blah blah blah 7".equals(t.getString("subject")));
+      t = tuples.get(8);
+      assertTrue("blah blah blah 8".equals(t.getString("subject")));
+      t = tuples.get(9);
+      assertTrue("blah blah blah 9".equals(t.getString("subject")));
+
+
+      stream = factory.constructStream("parallel(" + COLLECTIONORALIAS + ", workers=2, sort=\"a_f asc\", fetch(" + COLLECTIONORALIAS + ",  search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\", partitionKeys=\"id\"), on=\"id=a_i\", batchSize=\"3\", fl=\"subject\"))");
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
+      assert (tuples.size() == 10);
+      t = tuples.get(0);
+      assertTrue("blah blah blah 0".equals(t.getString("subject")));
+      t = tuples.get(1);
+      assertTrue("blah blah blah 2".equals(t.getString("subject")));
+      t = tuples.get(2);
+      assertTrue("blah blah blah 3".equals(t.getString("subject")));
+      t = tuples.get(3);
+      assertTrue("blah blah blah 4".equals(t.getString("subject")));
+      t = tuples.get(4);
+      assertTrue("blah blah blah 1".equals(t.getString("subject")));
+      t = tuples.get(5);
+      assertTrue("blah blah blah 5".equals(t.getString("subject")));
+      t = tuples.get(6);
+      assertTrue("blah blah blah 6".equals(t.getString("subject")));
+      t = tuples.get(7);
+      assertTrue("blah blah blah 7".equals(t.getString("subject")));
+      t = tuples.get(8);
+      assertTrue("blah blah blah 8".equals(t.getString("subject")));
+      t = tuples.get(9);
+      assertTrue("blah blah blah 9".equals(t.getString("subject")));
+    } finally {
+      solrClientCache.close();
+    }
   }
 
 
@@ -1244,87 +1409,91 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         + "sum(a_i)"
         + "), id=\"test\", runInterval=\"1000\", queueSize=\"9\")");
     daemonStream = (DaemonStream)factory.constructStream(expression);
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+    daemonStream.setStreamContext(streamContext);
+    try {
+      //Test Long and Double Sums
 
+      daemonStream.open(); // This will start the daemon thread
 
-    //Test Long and Double Sums
-
-    daemonStream.open(); // This will start the daemon thread
-
-    for(int i=0; i<4; i++) {
-      Tuple tuple = daemonStream.read(); // Reads from the queue
-      String bucket = tuple.getString("a_s");
-      Double sumi = tuple.getDouble("sum(a_i)");
-
-      //System.out.println("#################################### Bucket 1:"+bucket);
-      assertTrue(bucket.equals("hello0"));
-      assertTrue(sumi.doubleValue() == 17.0D);
-
-      tuple = daemonStream.read();
-      bucket = tuple.getString("a_s");
-      sumi = tuple.getDouble("sum(a_i)");
-
-      //System.out.println("#################################### Bucket 2:"+bucket);
-      assertTrue(bucket.equals("hello3"));
-      assertTrue(sumi.doubleValue() == 38.0D);
+      for (int i = 0; i < 4; i++) {
+        Tuple tuple = daemonStream.read(); // Reads from the queue
+        String bucket = tuple.getString("a_s");
+        Double sumi = tuple.getDouble("sum(a_i)");
 
-      tuple = daemonStream.read();
-      bucket = tuple.getString("a_s");
-      sumi = tuple.getDouble("sum(a_i)");
-      //System.out.println("#################################### Bucket 3:"+bucket);
-      assertTrue(bucket.equals("hello4"));
-      assertTrue(sumi.longValue() == 15);
-    }
+        //System.out.println("#################################### Bucket 1:"+bucket);
+        assertTrue(bucket.equals("hello0"));
+        assertTrue(sumi.doubleValue() == 17.0D);
 
-    //Now lets wait until the internal queue fills up
+        tuple = daemonStream.read();
+        bucket = tuple.getString("a_s");
+        sumi = tuple.getDouble("sum(a_i)");
 
-    while(daemonStream.remainingCapacity() > 0) {
-      try {
-        Thread.sleep(1000);
-      } catch (Exception e) {
+        //System.out.println("#################################### Bucket 2:"+bucket);
+        assertTrue(bucket.equals("hello3"));
+        assertTrue(sumi.doubleValue() == 38.0D);
 
+        tuple = daemonStream.read();
+        bucket = tuple.getString("a_s");
+        sumi = tuple.getDouble("sum(a_i)");
+        //System.out.println("#################################### Bucket 3:"+bucket);
+        assertTrue(bucket.equals("hello4"));
+        assertTrue(sumi.longValue() == 15);
       }
-    }
-
-    //OK capacity is full, let's index a new doc
 
-    new UpdateRequest()
-        .add(id, "10", "a_s", "hello0", "a_i", "1", "a_f", "10")
-        .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
+      //Now let's wait until the internal queue fills up
 
-    //Now lets clear the existing docs in the queue 9, plus 3 more to get passed the run that was blocked. The next run should
-    //have the tuples with the updated count.
-    for(int i=0; i<12;i++) {
-      daemonStream.read();
-    }
+      while (daemonStream.remainingCapacity() > 0) {
+        try {
+          Thread.sleep(1000);
+        } catch (Exception e) {
 
-    //And rerun the loop. It should have a new count for hello0
-    for(int i=0; i<4; i++) {
-      Tuple tuple = daemonStream.read(); // Reads from the queue
-      String bucket = tuple.getString("a_s");
-      Double sumi = tuple.getDouble("sum(a_i)");
+        }
+      }
 
-      //System.out.println("#################################### Bucket 1:"+bucket);
-      assertTrue(bucket.equals("hello0"));
-      assertTrue(sumi.doubleValue() == 18.0D);
+      //OK capacity is full, let's index a new doc
 
-      tuple = daemonStream.read();
-      bucket = tuple.getString("a_s");
-      sumi = tuple.getDouble("sum(a_i)");
+      new UpdateRequest()
+          .add(id, "10", "a_s", "hello0", "a_i", "1", "a_f", "10")
+          .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-      //System.out.println("#################################### Bucket 2:"+bucket);
-      assertTrue(bucket.equals("hello3"));
-      assertTrue(sumi.doubleValue() == 38.0D);
+      //Now let's clear the 9 existing docs in the queue, plus 3 more to get past the run that was blocked. The next run should
+      //have the tuples with the updated count.
+      for (int i = 0; i < 12; i++) {
+        daemonStream.read();
+      }
 
-      tuple = daemonStream.read();
-      bucket = tuple.getString("a_s");
-      sumi = tuple.getDouble("sum(a_i)");
-      //System.out.println("#################################### Bucket 3:"+bucket);
-      assertTrue(bucket.equals("hello4"));
-      assertTrue(sumi.longValue() == 15);
+      //And rerun the loop. It should have a new count for hello0
+      for (int i = 0; i < 4; i++) {
+        Tuple tuple = daemonStream.read(); // Reads from the queue
+        String bucket = tuple.getString("a_s");
+        Double sumi = tuple.getDouble("sum(a_i)");
+
+        //System.out.println("#################################### Bucket 1:"+bucket);
+        assertTrue(bucket.equals("hello0"));
+        assertTrue(sumi.doubleValue() == 18.0D);
+
+        tuple = daemonStream.read();
+        bucket = tuple.getString("a_s");
+        sumi = tuple.getDouble("sum(a_i)");
+
+        //System.out.println("#################################### Bucket 2:"+bucket);
+        assertTrue(bucket.equals("hello3"));
+        assertTrue(sumi.doubleValue() == 38.0D);
+
+        tuple = daemonStream.read();
+        bucket = tuple.getString("a_s");
+        sumi = tuple.getDouble("sum(a_i)");
+        //System.out.println("#################################### Bucket 3:"+bucket);
+        assertTrue(bucket.equals("hello4"));
+        assertTrue(sumi.longValue() == 15);
+      }
+    } finally {
+      daemonStream.close(); //This should stop the daemon thread
+      solrClientCache.close();
     }
-
-    daemonStream.close(); //This should stop the daemon thread
-
   }
 
 
@@ -1396,96 +1565,103 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     StreamExpression expression;
     TupleStream stream;
     List<Tuple> tuples;
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+    try {
+      expression = StreamExpressionParser.parse("rollup("
+          + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"a_s,a_i,a_f\", sort=\"a_s asc\"),"
+          + "over=\"a_s\","
+          + "sum(a_i),"
+          + "sum(a_f),"
+          + "min(a_i),"
+          + "min(a_f),"
+          + "max(a_i),"
+          + "max(a_f),"
+          + "avg(a_i),"
+          + "avg(a_f),"
+          + "count(*),"
+          + ")");
+      stream = factory.constructStream(expression);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
-    expression = StreamExpressionParser.parse("rollup("
-                                              + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"a_s,a_i,a_f\", sort=\"a_s asc\"),"
-                                              + "over=\"a_s\","
-                                              + "sum(a_i),"
-                                              + "sum(a_f),"
-                                              + "min(a_i),"
-                                              + "min(a_f),"
-                                              + "max(a_i),"
-                                              + "max(a_f),"
-                                              + "avg(a_i),"
-                                              + "avg(a_f),"
-                                              + "count(*),"
-                                              + ")");
-    stream = factory.constructStream(expression);
-    tuples = getTuples(stream);
-
-    assert(tuples.size() == 3);
-
-    //Test Long and Double Sums
-
-    Tuple tuple = tuples.get(0);
-    String bucket = tuple.getString("a_s");
-    Double sumi = tuple.getDouble("sum(a_i)");
-    Double sumf = tuple.getDouble("sum(a_f)");
-    Double mini = tuple.getDouble("min(a_i)");
-    Double minf = tuple.getDouble("min(a_f)");
-    Double maxi = tuple.getDouble("max(a_i)");
-    Double maxf = tuple.getDouble("max(a_f)");
-    Double avgi = tuple.getDouble("avg(a_i)");
-    Double avgf = tuple.getDouble("avg(a_f)");
-    Double count = tuple.getDouble("count(*)");
-
-    assertTrue(bucket.equals("hello0"));
-    assertTrue(sumi.doubleValue() == 17.0D);
-    assertTrue(sumf.doubleValue() == 18.0D);
-    assertTrue(mini.doubleValue() == 0.0D);
-    assertTrue(minf.doubleValue() == 1.0D);
-    assertTrue(maxi.doubleValue() == 14.0D);
-    assertTrue(maxf.doubleValue() == 10.0D);
-    assertTrue(avgi.doubleValue() == 4.25D);
-    assertTrue(avgf.doubleValue() == 4.5D);
-    assertTrue(count.doubleValue() == 4);
+      assert (tuples.size() == 3);
 
-    tuple = tuples.get(1);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
+      //Test Long and Double Sums
 
-    assertTrue(bucket.equals("hello3"));
-    assertTrue(sumi.doubleValue() == 38.0D);
-    assertTrue(sumf.doubleValue() == 26.0D);
-    assertTrue(mini.doubleValue() == 3.0D);
-    assertTrue(minf.doubleValue() == 3.0D);
-    assertTrue(maxi.doubleValue() == 13.0D);
-    assertTrue(maxf.doubleValue() == 9.0D);
-    assertTrue(avgi.doubleValue() == 9.5D);
-    assertTrue(avgf.doubleValue() == 6.5D);
-    assertTrue(count.doubleValue() == 4);
+      Tuple tuple = tuples.get(0);
+      String bucket = tuple.getString("a_s");
+      Double sumi = tuple.getDouble("sum(a_i)");
+      Double sumf = tuple.getDouble("sum(a_f)");
+      Double mini = tuple.getDouble("min(a_i)");
+      Double minf = tuple.getDouble("min(a_f)");
+      Double maxi = tuple.getDouble("max(a_i)");
+      Double maxf = tuple.getDouble("max(a_f)");
+      Double avgi = tuple.getDouble("avg(a_i)");
+      Double avgf = tuple.getDouble("avg(a_f)");
+      Double count = tuple.getDouble("count(*)");
 
-    tuple = tuples.get(2);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
+      assertTrue(bucket.equals("hello0"));
+      assertTrue(sumi.doubleValue() == 17.0D);
+      assertTrue(sumf.doubleValue() == 18.0D);
+      assertTrue(mini.doubleValue() == 0.0D);
+      assertTrue(minf.doubleValue() == 1.0D);
+      assertTrue(maxi.doubleValue() == 14.0D);
+      assertTrue(maxf.doubleValue() == 10.0D);
+      assertTrue(avgi.doubleValue() == 4.25D);
+      assertTrue(avgf.doubleValue() == 4.5D);
+      assertTrue(count.doubleValue() == 4);
+
+      tuple = tuples.get(1);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
 
-    assertTrue(bucket.equals("hello4"));
-    assertTrue(sumi.longValue() == 15);
-    assertTrue(sumf.doubleValue() == 11.0D);
-    assertTrue(mini.doubleValue() == 4.0D);
-    assertTrue(minf.doubleValue() == 4.0D);
-    assertTrue(maxi.doubleValue() == 11.0D);
-    assertTrue(maxf.doubleValue() == 7.0D);
-    assertTrue(avgi.doubleValue() == 7.5D);
-    assertTrue(avgf.doubleValue() == 5.5D);
-    assertTrue(count.doubleValue() == 2);
+      assertTrue(bucket.equals("hello3"));
+      assertTrue(sumi.doubleValue() == 38.0D);
+      assertTrue(sumf.doubleValue() == 26.0D);
+      assertTrue(mini.doubleValue() == 3.0D);
+      assertTrue(minf.doubleValue() == 3.0D);
+      assertTrue(maxi.doubleValue() == 13.0D);
+      assertTrue(maxf.doubleValue() == 9.0D);
+      assertTrue(avgi.doubleValue() == 9.5D);
+      assertTrue(avgf.doubleValue() == 6.5D);
+      assertTrue(count.doubleValue() == 4);
+
+      tuple = tuples.get(2);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
 
+      assertTrue(bucket.equals("hello4"));
+      assertTrue(sumi.longValue() == 15);
+      assertTrue(sumf.doubleValue() == 11.0D);
+      assertTrue(mini.doubleValue() == 4.0D);
+      assertTrue(minf.doubleValue() == 4.0D);
+      assertTrue(maxi.doubleValue() == 11.0D);
+      assertTrue(maxf.doubleValue() == 7.0D);
+      assertTrue(avgi.doubleValue() == 7.5D);
+      assertTrue(avgf.doubleValue() == 5.5D);
+      assertTrue(count.doubleValue() == 2);
+
+    } finally {
+      solrClientCache.close();
+    }
   }
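
rollup() emits one tuple per distinct value of the "over" field, with the
registered aggregates exposed under keys such as "sum(a_i)" and "count(*)".
It is a single-pass operator, so the wrapped stream must already be sorted
by the over field, which the sort="a_s asc" in the search above guarantees.
A condensed sketch of the wiring; the shortened metric list is illustrative:

    String expr = "rollup("
        + "search(collection1, q=*:*, fl=\"a_s,a_i,a_f\", sort=\"a_s asc\"),"
        + "over=\"a_s\","
        + "sum(a_i), min(a_i), max(a_i), avg(a_i), count(*))";
    TupleStream stream = factory.constructStream(StreamExpressionParser.parse(expr));
    stream.setStreamContext(streamContext);
    List<Tuple> tuples = getTuples(stream);           // one tuple per a_s bucket
    Double sum = tuples.get(0).getDouble("sum(a_i)"); // keyed by the metric name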
 
   @Test
@@ -1572,20 +1748,122 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         .withFunctionName("top", RankStream.class)
         .withFunctionName("group", ReducerStream.class)
         .withFunctionName("parallel", ParallelStream.class);
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
+
+
+    try {
+
+      ParallelStream pstream = (ParallelStream) streamFactory.constructStream("parallel(" + COLLECTIONORALIAS + ", unique(search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", partitionKeys=\"a_f\"), over=\"a_f\"), workers=\"2\", zkHost=\"" + zkHost + "\", sort=\"a_f asc\")");
+      pstream.setStreamContext(streamContext);
+      List<Tuple> tuples = getTuples(pstream);
+      assert (tuples.size() == 5);
+      assertOrder(tuples, 0, 1, 3, 4, 6);
+
+      //Test the eofTuples
+
+      Map<String, Tuple> eofTuples = pstream.getEofTuples();
+      assert (eofTuples.size() == 2); //There should be an EOF tuple for each worker.
+    } finally {
+      solrClientCache.close();
+    }
+  }
+
+  @Test
+  public void testParallelShuffleStream() throws Exception {
 
-    ParallelStream pstream = (ParallelStream)streamFactory.constructStream("parallel(" + COLLECTIONORALIAS + ", unique(search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", partitionKeys=\"a_f\"), over=\"a_f\"), workers=\"2\", zkHost=\""+zkHost+"\", sort=\"a_f asc\")");
+    new UpdateRequest()
+        .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0")
+        .add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0")
+        .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
+        .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4")
+        .add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1")
+        .add(id, "5", "a_s", "hello1", "a_i", "10", "a_f", "1")
+        .add(id, "6", "a_s", "hello1", "a_i", "11", "a_f", "5")
+        .add(id, "7", "a_s", "hello1", "a_i", "12", "a_f", "5")
+        .add(id, "8", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "9", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "10", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "11", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "12", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "13", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "14", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "15", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "16", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "17", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "18", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "19", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "20", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "21", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "22", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "23", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "24", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "25", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "26", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "27", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "28", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "29", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "30", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "31", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "32", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "33", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "34", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "35", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "36", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "37", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "38", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "39", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "40", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "41", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "42", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "43", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "44", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "45", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "46", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "47", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "48", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "49", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "50", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "51", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "52", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "53", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "54", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "55", "a_s", "hello1", "a_i", "13", "a_f", "4")
+        .add(id, "56", "a_s", "hello1", "a_i", "13", "a_f", "1000")
 
-    List<Tuple> tuples = getTuples(pstream);
-    assert(tuples.size() == 5);
-    assertOrder(tuples, 0, 1, 3, 4, 6);
+        .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-    //Test the eofTuples
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
 
-    Map<String,Tuple> eofTuples = pstream.getEofTuples();
-    assert(eofTuples.size() == 2); //There should be an EOF tuple for each worker.
+    String zkHost = cluster.getZkServer().getZkAddress();
+    StreamFactory streamFactory = new StreamFactory().withCollectionZkHost(COLLECTIONORALIAS, zkHost)
+        .withFunctionName("shuffle", ShuffleStream.class)
+        .withFunctionName("unique", UniqueStream.class)
+        .withFunctionName("parallel", ParallelStream.class);
 
+    try {
+      ParallelStream pstream = (ParallelStream) streamFactory.constructStream("parallel(" + COLLECTIONORALIAS + ", unique(shuffle(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", partitionKeys=\"a_f\"), over=\"a_f\"), workers=\"2\", zkHost=\"" + zkHost + "\", sort=\"a_f asc\")");
+      pstream.setStreamFactory(streamFactory);
+      pstream.setStreamContext(streamContext);
+      List<Tuple> tuples = getTuples(pstream);
+      assert (tuples.size() == 6);
+      assertOrder(tuples, 0, 1, 3, 4, 6, 56);
+
+      //Test the eofTuples
+
+      Map<String, Tuple> eofTuples = pstream.getEofTuples();
+      assert (eofTuples.size() == 2); //There should be an EOF tuple for each worker.
+      assert (pstream.toExpression(streamFactory).toString().contains("shuffle"));
+    } finally {
+      solrClientCache.close();
+    }
   }
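
The new test pairs shuffle() with parallel(): partitionKeys="a_f" routes
tuples with equal a_f values to the same worker, each of the two workers
deduplicates its own partition with unique(over="a_f"), and parallel()
merges the worker outputs back together on sort="a_f asc". The final
toExpression() assertion additionally pins down that the shuffle source
survives serialization of the plan. A sketch of the shape, reformatted for
readability only (shuffle() is used exactly like search() here, with the
same q/fl/sort/partitionKeys parameters):

    String expr = "parallel(collection1,"
        + " unique("
        + "   shuffle(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\","
        + "           sort=\"a_f asc, a_i asc\", partitionKeys=\"a_f\"),"
        + "   over=\"a_f\"),"
        + " workers=\"2\", zkHost=\"" + zkHost + "\", sort=\"a_f asc\")";
    ParallelStream pstream = (ParallelStream) streamFactory.constructStream(expr);
    pstream.setStreamFactory(streamFactory); // set explicitly by the test,
                                             // presumably so the plan can be
                                             // re-serialized for the workers
    pstream.setStreamContext(streamContext);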
 
+
   @Test
   public void testParallelReducerStream() throws Exception {
 
@@ -1602,6 +1880,11 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
         .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
+
     String zkHost = cluster.getZkServer().getZkAddress();
     StreamFactory streamFactory = new StreamFactory().withCollectionZkHost(COLLECTIONORALIAS, zkHost)
         .withFunctionName("search", CloudSolrStream.class)
@@ -1609,54 +1892,62 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         .withFunctionName("reduce", ReducerStream.class)
         .withFunctionName("parallel", ParallelStream.class);
 
-    ParallelStream pstream = (ParallelStream)streamFactory.constructStream("parallel(" + COLLECTIONORALIAS + ", " +
-                                                                                    "reduce(" +
-                                                                                              "search(" + COLLECTIONORALIAS + ", q=\"*:*\", fl=\"id,a_s,a_i,a_f\", sort=\"a_s asc,a_f asc\", partitionKeys=\"a_s\"), " +
-                                                                                              "by=\"a_s\"," +
-                                                                                              "group(sort=\"a_i asc\", n=\"5\")), " +
-                                                                                    "workers=\"2\", zkHost=\""+zkHost+"\", sort=\"a_s asc\")");
 
-    List<Tuple> tuples = getTuples(pstream);
+    try {
+      ParallelStream pstream = (ParallelStream) streamFactory.constructStream("parallel(" + COLLECTIONORALIAS + ", " +
+          "reduce(" +
+          "search(" + COLLECTIONORALIAS + ", q=\"*:*\", fl=\"id,a_s,a_i,a_f\", sort=\"a_s asc,a_f asc\", partitionKeys=\"a_s\"), " +
+          "by=\"a_s\"," +
+          "group(sort=\"a_i asc\", n=\"5\")), " +
+          "workers=\"2\", zkHost=\"" + zkHost + "\", sort=\"a_s asc\")");
 
-    assert(tuples.size() == 3);
+      pstream.setStreamContext(streamContext);
 
-    Tuple t0 = tuples.get(0);
-    List<Map> maps0 = t0.getMaps("group");
-    assertMaps(maps0, 0, 1, 2, 9);
+      List<Tuple> tuples = getTuples(pstream);
 
-    Tuple t1 = tuples.get(1);
-    List<Map> maps1 = t1.getMaps("group");
-    assertMaps(maps1, 3, 5, 7, 8);
+      assert (tuples.size() == 3);
 
-    Tuple t2 = tuples.get(2);
-    List<Map> maps2 = t2.getMaps("group");
-    assertMaps(maps2, 4, 6);
+      Tuple t0 = tuples.get(0);
+      List<Map> maps0 = t0.getMaps("group");
+      assertMaps(maps0, 0, 1, 2, 9);
 
+      Tuple t1 = tuples.get(1);
+      List<Map> maps1 = t1.getMaps("group");
+      assertMaps(maps1, 3, 5, 7, 8);
 
-    pstream = (ParallelStream)streamFactory.constructStream("parallel(" + COLLECTIONORALIAS + ", " +
-                                                                      "reduce(" +
-                                                                              "search(" + COLLECTIONORALIAS + ", q=\"*:*\", fl=\"id,a_s,a_i,a_f\", sort=\"a_s desc,a_f asc\", partitionKeys=\"a_s\"), " +
-                                                                              "by=\"a_s\", " +
-                                                                              "group(sort=\"a_i desc\", n=\"5\")),"+
-                                                                      "workers=\"2\", zkHost=\""+zkHost+"\", sort=\"a_s desc\")");
+      Tuple t2 = tuples.get(2);
+      List<Map> maps2 = t2.getMaps("group");
+      assertMaps(maps2, 4, 6);
 
-    tuples = getTuples(pstream);
 
-    assert(tuples.size() == 3);
+      pstream = (ParallelStream) streamFactory.constructStream("parallel(" + COLLECTIONORALIAS + ", " +
+          "reduce(" +
+          "search(" + COLLECTIONORALIAS + ", q=\"*:*\", fl=\"id,a_s,a_i,a_f\", sort=\"a_s desc,a_f asc\", partitionKeys=\"a_s\"), " +
+          "by=\"a_s\", " +
+          "group(sort=\"a_i desc\", n=\"5\"))," +
+          "workers=\"2\", zkHost=\"" + zkHost + "\", sort=\"a_s desc\")");
 
-    t0 = tuples.get(0);
-    maps0 = t0.getMaps("group");
-    assertMaps(maps0, 6, 4);
+      pstream.setStreamContext(streamContext);
+      tuples = getTuples(pstream);
 
+      assert (tuples.size() == 3);
+
+      t0 = tuples.get(0);
+      maps0 = t0.getMaps("group");
+      assertMaps(maps0, 6, 4);
 
-    t1 = tuples.get(1);
-    maps1 = t1.getMaps("group");
-    assertMaps(maps1, 8, 7, 5, 3);
 
+      t1 = tuples.get(1);
+      maps1 = t1.getMaps("group");
+      assertMaps(maps1, 8, 7, 5, 3);
 
-    t2 = tuples.get(2);
-    maps2 = t2.getMaps("group");
-    assertMaps(maps2, 9, 2, 1, 0);
+
+      t2 = tuples.get(2);
+      maps2 = t2.getMaps("group");
+      assertMaps(maps2, 9, 2, 1, 0);
+    } finally {
+      solrClientCache.close();
+    }
 
   }
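
ReducerStream groups a sorted stream by the equalitor field and emits one
tuple per group, with the grouped source tuples nested as maps under the
"group" key; the GroupOperation bounds and orders what is kept per group.
A compact sketch of the programmatic form used further down in
StreamingTest, where stream stands for any tuple source already sorted by
a_s:

    ReducerStream rstream = new ReducerStream(stream,
        new FieldEqualitor("a_s"),              // group boundary
        new GroupOperation(                     // keep the top 5 rows per group
            new FieldComparator("a_f", ComparatorOrder.ASCENDING), 5));
    rstream.setStreamContext(streamContext);
    for (Tuple t : getTuples(rstream)) {
      List<Map> group = t.getMaps("group");     // the grouped source tuples
    }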
 
@@ -1684,17 +1975,24 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         .withFunctionName("group", ReducerStream.class)
         .withFunctionName("parallel", ParallelStream.class);
 
-    ParallelStream pstream = (ParallelStream)streamFactory.constructStream("parallel("
-        + COLLECTIONORALIAS + ", "
-        + "top("
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+    try {
+      ParallelStream pstream = (ParallelStream) streamFactory.constructStream("parallel("
+          + COLLECTIONORALIAS + ", "
+          + "top("
           + "search(" + COLLECTIONORALIAS + ", q=\"*:*\", fl=\"id,a_s,a_i\", sort=\"a_i asc\", partitionKeys=\"a_i\"), "
           + "n=\"11\", "
-          + "sort=\"a_i desc\"), workers=\"2\", zkHost=\""+zkHost+"\", sort=\"a_i desc\")");
+          + "sort=\"a_i desc\"), workers=\"2\", zkHost=\"" + zkHost + "\", sort=\"a_i desc\")");
+      pstream.setStreamContext(streamContext);
+      List<Tuple> tuples = getTuples(pstream);
 
-    List<Tuple> tuples = getTuples(pstream);
-
-    assert(tuples.size() == 10);
-    assertOrder(tuples, 10,9,8,7,6,5,4,3,2,0);
+      assert (tuples.size() == 10);
+      assertOrder(tuples, 10, 9, 8, 7, 6, 5, 4, 3, 2, 0);
+    } finally {
+      solrClientCache.close();
+    }
 
   }
 
@@ -1723,24 +2021,29 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         .withFunctionName("merge", MergeStream.class)
         .withFunctionName("parallel", ParallelStream.class);
 
-    //Test ascending
-    ParallelStream pstream = (ParallelStream)streamFactory.constructStream("parallel(" + COLLECTIONORALIAS + ", merge(search(" + COLLECTIONORALIAS + ", q=\"id:(4 1 8 7 9)\", fl=\"id,a_s,a_i\", sort=\"a_i asc\", partitionKeys=\"a_i\"), search(" + COLLECTIONORALIAS + ", q=\"id:(0 2 3 6)\", fl=\"id,a_s,a_i\", sort=\"a_i asc\", partitionKeys=\"a_i\"), on=\"a_i asc\"), workers=\"2\", zkHost=\""+zkHost+"\", sort=\"a_i asc\")");
-
-    List<Tuple> tuples = getTuples(pstream);
-
-
-
-    assert(tuples.size() == 9);
-    assertOrder(tuples, 0, 1, 2, 3, 4, 7, 6, 8, 9);
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+    try {
+      //Test ascending
+      ParallelStream pstream = (ParallelStream) streamFactory.constructStream("parallel(" + COLLECTIONORALIAS + ", merge(search(" + COLLECTIONORALIAS + ", q=\"id:(4 1 8 7 9)\", fl=\"id,a_s,a_i\", sort=\"a_i asc\", partitionKeys=\"a_i\"), search(" + COLLECTIONORALIAS + ", q=\"id:(0 2 3 6)\", fl=\"id,a_s,a_i\", sort=\"a_i asc\", partitionKeys=\"a_i\"), on=\"a_i asc\"), workers=\"2\", zkHost=\"" + zkHost + "\", sort=\"a_i asc\")");
+      pstream.setStreamContext(streamContext);
+      List<Tuple> tuples = getTuples(pstream);
 
-    //Test descending
+      assert (tuples.size() == 9);
+      assertOrder(tuples, 0, 1, 2, 3, 4, 7, 6, 8, 9);
 
-    pstream = (ParallelStream)streamFactory.constructStream("parallel(" + COLLECTIONORALIAS + ", merge(search(" + COLLECTIONORALIAS + ", q=\"id:(4 1 8 9)\", fl=\"id,a_s,a_i\", sort=\"a_i desc\", partitionKeys=\"a_i\"), search(" + COLLECTIONORALIAS + ", q=\"id:(0 2 3 6)\", fl=\"id,a_s,a_i\", sort=\"a_i desc\", partitionKeys=\"a_i\"), on=\"a_i desc\"), workers=\"2\", zkHost=\""+zkHost+"\", sort=\"a_i desc\")");
+      //Test descending
 
-    tuples = getTuples(pstream);
+      pstream = (ParallelStream) streamFactory.constructStream("parallel(" + COLLECTIONORALIAS + ", merge(search(" + COLLECTIONORALIAS + ", q=\"id:(4 1 8 9)\", fl=\"id,a_s,a_i\", sort=\"a_i desc\", partitionKeys=\"a_i\"), search(" + COLLECTIONORALIAS + ", q=\"id:(0 2 3 6)\", fl=\"id,a_s,a_i\", sort=\"a_i desc\", partitionKeys=\"a_i\"), on=\"a_i desc\"), workers=\"2\", zkHost=\"" + zkHost + "\", sort=\"a_i desc\")");
+      pstream.setStreamContext(streamContext);
+      tuples = getTuples(pstream);
 
-    assert(tuples.size() == 8);
-    assertOrder(tuples, 9, 8, 6, 4, 3, 2, 1, 0);
+      assert (tuples.size() == 8);
+      assertOrder(tuples, 9, 8, 6, 4, 3, 2, 1, 0);
+    } finally {
+      solrClientCache.close();
+    }
 
   }
 
@@ -1769,104 +2072,115 @@ public class StreamExpressionTest extends SolrCloudTestCase {
       .withFunctionName("min", MinMetric.class)
       .withFunctionName("max", MaxMetric.class)
       .withFunctionName("avg", MeanMetric.class)
-      .withFunctionName("count", CountMetric.class);     
-    
+      .withFunctionName("count", CountMetric.class);
+
+
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
     StreamExpression expression;
     TupleStream stream;
     List<Tuple> tuples;
 
-    expression = StreamExpressionParser.parse("parallel(" + COLLECTIONORALIAS + ","
-                                              + "rollup("
-                                                + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"a_s,a_i,a_f\", sort=\"a_s asc\", partitionKeys=\"a_s\"),"
-                                                + "over=\"a_s\","
-                                                + "sum(a_i),"
-                                                + "sum(a_f),"
-                                                + "min(a_i),"
-                                                + "min(a_f),"
-                                                + "max(a_i),"
-                                                + "max(a_f),"
-                                                + "avg(a_i),"
-                                                + "avg(a_f),"
-                                                + "count(*)"
-                                              + "),"
-                                              + "workers=\"2\", zkHost=\""+cluster.getZkServer().getZkAddress()+"\", sort=\"a_s asc\")"
-                                              );
-    stream = factory.constructStream(expression);
-    tuples = getTuples(stream);
-
-    assert(tuples.size() == 3);
+    try {
+      expression = StreamExpressionParser.parse("parallel(" + COLLECTIONORALIAS + ","
+              + "rollup("
+              + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"a_s,a_i,a_f\", sort=\"a_s asc\", partitionKeys=\"a_s\"),"
+              + "over=\"a_s\","
+              + "sum(a_i),"
+              + "sum(a_f),"
+              + "min(a_i),"
+              + "min(a_f),"
+              + "max(a_i),"
+              + "max(a_f),"
+              + "avg(a_i),"
+              + "avg(a_f),"
+              + "count(*)"
+              + "),"
+              + "workers=\"2\", zkHost=\"" + cluster.getZkServer().getZkAddress() + "\", sort=\"a_s asc\")"
+      );
 
-    //Test Long and Double Sums
 
-    Tuple tuple = tuples.get(0);
-    String bucket = tuple.getString("a_s");
-    Double sumi = tuple.getDouble("sum(a_i)");
-    Double sumf = tuple.getDouble("sum(a_f)");
-    Double mini = tuple.getDouble("min(a_i)");
-    Double minf = tuple.getDouble("min(a_f)");
-    Double maxi = tuple.getDouble("max(a_i)");
-    Double maxf = tuple.getDouble("max(a_f)");
-    Double avgi = tuple.getDouble("avg(a_i)");
-    Double avgf = tuple.getDouble("avg(a_f)");
-    Double count = tuple.getDouble("count(*)");
+      stream = factory.constructStream(expression);
+      stream.setStreamContext(streamContext);
+      tuples = getTuples(stream);
 
-    assertTrue(bucket.equals("hello0"));
-    assertTrue(sumi.doubleValue() == 17.0D);
-    assertTrue(sumf.doubleValue() == 18.0D);
-    assertTrue(mini.doubleValue() == 0.0D);
-    assertTrue(minf.doubleValue() == 1.0D);
-    assertTrue(maxi.doubleValue() == 14.0D);
-    assertTrue(maxf.doubleValue() == 10.0D);
-    assertTrue(avgi.doubleValue() == 4.25D);
-    assertTrue(avgf.doubleValue() == 4.5D);
-    assertTrue(count.doubleValue() == 4);
+      assert (tuples.size() == 3);
 
-    tuple = tuples.get(1);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
+      //Test Long and Double Sums
 
-    assertTrue(bucket.equals("hello3"));
-    assertTrue(sumi.doubleValue() == 38.0D);
-    assertTrue(sumf.doubleValue() == 26.0D);
-    assertTrue(mini.doubleValue() == 3.0D);
-    assertTrue(minf.doubleValue() == 3.0D);
-    assertTrue(maxi.doubleValue() == 13.0D);
-    assertTrue(maxf.doubleValue() == 9.0D);
-    assertTrue(avgi.doubleValue() == 9.5D);
-    assertTrue(avgf.doubleValue() == 6.5D);
-    assertTrue(count.doubleValue() == 4);
+      Tuple tuple = tuples.get(0);
+      String bucket = tuple.getString("a_s");
+      Double sumi = tuple.getDouble("sum(a_i)");
+      Double sumf = tuple.getDouble("sum(a_f)");
+      Double mini = tuple.getDouble("min(a_i)");
+      Double minf = tuple.getDouble("min(a_f)");
+      Double maxi = tuple.getDouble("max(a_i)");
+      Double maxf = tuple.getDouble("max(a_f)");
+      Double avgi = tuple.getDouble("avg(a_i)");
+      Double avgf = tuple.getDouble("avg(a_f)");
+      Double count = tuple.getDouble("count(*)");
 
-    tuple = tuples.get(2);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
+      assertTrue(bucket.equals("hello0"));
+      assertTrue(sumi.doubleValue() == 17.0D);
+      assertTrue(sumf.doubleValue() == 18.0D);
+      assertTrue(mini.doubleValue() == 0.0D);
+      assertTrue(minf.doubleValue() == 1.0D);
+      assertTrue(maxi.doubleValue() == 14.0D);
+      assertTrue(maxf.doubleValue() == 10.0D);
+      assertTrue(avgi.doubleValue() == 4.25D);
+      assertTrue(avgf.doubleValue() == 4.5D);
+      assertTrue(count.doubleValue() == 4);
+
+      tuple = tuples.get(1);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
 
-    assertTrue(bucket.equals("hello4"));
-    assertTrue(sumi.longValue() == 15);
-    assertTrue(sumf.doubleValue() == 11.0D);
-    assertTrue(mini.doubleValue() == 4.0D);
-    assertTrue(minf.doubleValue() == 4.0D);
-    assertTrue(maxi.doubleValue() == 11.0D);
-    assertTrue(maxf.doubleValue() == 7.0D);
-    assertTrue(avgi.doubleValue() == 7.5D);
-    assertTrue(avgf.doubleValue() == 5.5D);
-    assertTrue(count.doubleValue() == 2);
+      assertTrue(bucket.equals("hello3"));
+      assertTrue(sumi.doubleValue() == 38.0D);
+      assertTrue(sumf.doubleValue() == 26.0D);
+      assertTrue(mini.doubleValue() == 3.0D);
+      assertTrue(minf.doubleValue() == 3.0D);
+      assertTrue(maxi.doubleValue() == 13.0D);
+      assertTrue(maxf.doubleValue() == 9.0D);
+      assertTrue(avgi.doubleValue() == 9.5D);
+      assertTrue(avgf.doubleValue() == 6.5D);
+      assertTrue(count.doubleValue() == 4);
+
+      tuple = tuples.get(2);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
 
+      assertTrue(bucket.equals("hello4"));
+      assertTrue(sumi.longValue() == 15);
+      assertTrue(sumf.doubleValue() == 11.0D);
+      assertTrue(mini.doubleValue() == 4.0D);
+      assertTrue(minf.doubleValue() == 4.0D);
+      assertTrue(maxi.doubleValue() == 11.0D);
+      assertTrue(maxf.doubleValue() == 7.0D);
+      assertTrue(avgi.doubleValue() == 7.5D);
+      assertTrue(avgf.doubleValue() == 5.5D);
+      assertTrue(count.doubleValue() == 2);
+    } finally {
+      solrClientCache.close();
+    }
   }
 
   @Test
@@ -1894,52 +2208,62 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     StreamExpression expression;
     TupleStream stream;
     List<Tuple> tuples;
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
     
     StreamFactory factory = new StreamFactory()
       .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress())
       .withFunctionName("search", CloudSolrStream.class)
       .withFunctionName("innerJoin", InnerJoinStream.class);
-    
-    // Basic test
-    expression = StreamExpressionParser.parse("innerJoin("
-                                                + "search(" + COLLECTIONORALIAS + ", q=\"side_s:left\", fl=\"id,join1_i,join2_s,ident_s\", sort=\"join1_i asc, join2_s asc, id asc\"),"
-                                                + "search(" + COLLECTIONORALIAS + ", q=\"side_s:right\", fl=\"join1_i,join2_s,ident_s\", sort=\"join1_i asc, join2_s asc\"),"
-                                                + "on=\"join1_i=join1_i, join2_s=join2_s\")");
-    stream = new InnerJoinStrea

<TRUNCATED>

[03/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
index 2f2273e..0de3aa0 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
@@ -131,13 +131,20 @@ public void testUniqueStream() throws Exception {
       .add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1")
       .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-  SolrParams sParams = StreamingTest.mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i asc");
-  CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParams);
-  UniqueStream ustream = new UniqueStream(stream, new FieldEqualitor("a_f"));
-  List<Tuple> tuples = getTuples(ustream);
-  assertEquals(4, tuples.size());
-  assertOrder(tuples, 0,1,3,4);
-
+  StreamContext streamContext = new StreamContext();
+  SolrClientCache solrClientCache = new SolrClientCache();
+  streamContext.setSolrClientCache(solrClientCache);
+  try {
+    SolrParams sParams = StreamingTest.mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i asc");
+    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParams);
+    UniqueStream ustream = new UniqueStream(stream, new FieldEqualitor("a_f"));
+    ustream.setStreamContext(streamContext);
+    List<Tuple> tuples = getTuples(ustream);
+    assertEquals(4, tuples.size());
+    assertOrder(tuples, 0, 1, 3, 4);
+  } finally {
+    solrClientCache.close();
+  }
 }
 
 @Test
@@ -167,15 +174,22 @@ public void testNonePartitionKeys() throws Exception {
       .add(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9")
       .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
       .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
+  StreamContext streamContext = new StreamContext();
+  SolrClientCache solrClientCache = new SolrClientCache();
+  streamContext.setSolrClientCache(solrClientCache);
+  try {
 
-  SolrParams sParamsA = StreamingTest.mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_s asc,a_f asc", "partitionKeys", "none");
-  CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
-  ParallelStream pstream = parallelStream(stream, new FieldComparator("a_s", ComparatorOrder.ASCENDING));
-  attachStreamFactory(pstream);
-  List<Tuple> tuples = getTuples(pstream);
-
-  assert(tuples.size() == (10 * numWorkers)); // Each tuple will be double counted.
+    SolrParams sParamsA = StreamingTest.mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_s asc,a_f asc", "partitionKeys", "none");
+    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
+    ParallelStream pstream = parallelStream(stream, new FieldComparator("a_s", ComparatorOrder.ASCENDING));
+    attachStreamFactory(pstream);
+    pstream.setStreamContext(streamContext);
+    List<Tuple> tuples = getTuples(pstream);
 
+    assert (tuples.size() == (10 * numWorkers)); // Each tuple will be double counted.
+  } finally {
+    solrClientCache.close();
+  }
 }
 
 @Test
@@ -193,19 +207,29 @@ public void testParallelUniqueStream() throws Exception {
       .add(id, "8", "a_s", "hello1", "a_i", "13", "a_f", "4")
       .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-  SolrParams sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i asc", "partitionKeys", "a_f");
-  CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParams);
-  UniqueStream ustream = new UniqueStream(stream, new FieldEqualitor("a_f"));
-  ParallelStream pstream = parallelStream(ustream, new FieldComparator("a_f", ComparatorOrder.ASCENDING));
-  attachStreamFactory(pstream);
-  List<Tuple> tuples = getTuples(pstream);
-  assertEquals(5, tuples.size());
-  assertOrder(tuples, 0, 1, 3, 4, 6);
+  StreamContext streamContext = new StreamContext();
+  SolrClientCache solrClientCache = new SolrClientCache();
+  streamContext.setSolrClientCache(solrClientCache);
+
+  try {
 
-  //Test the eofTuples
+    SolrParams sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i asc", "partitionKeys", "a_f");
+    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParams);
+    UniqueStream ustream = new UniqueStream(stream, new FieldEqualitor("a_f"));
+    ParallelStream pstream = parallelStream(ustream, new FieldComparator("a_f", ComparatorOrder.ASCENDING));
+    attachStreamFactory(pstream);
+    pstream.setStreamContext(streamContext);
+    List<Tuple> tuples = getTuples(pstream);
+    assertEquals(5, tuples.size());
+    assertOrder(tuples, 0, 1, 3, 4, 6);
 
-  Map<String,Tuple> eofTuples = pstream.getEofTuples();
-  assertEquals(numWorkers, eofTuples.size()); //There should be an EOF tuple for each worker.
+    //Test the eofTuples
+
+    Map<String, Tuple> eofTuples = pstream.getEofTuples();
+    assertEquals(numWorkers, eofTuples.size()); //There should be an EOF tuple for each worker.
+  } finally {
+    solrClientCache.close();
+  }
 
 }
 
@@ -226,12 +250,21 @@ public void testMultipleFqClauses() throws Exception {
 
   streamFactory.withCollectionZkHost(COLLECTIONORALIAS, zkHost);
 
-  ModifiableSolrParams params = new ModifiableSolrParams(mapParams("q", "*:*", "fl", "id,a_i", 
-      "sort", "a_i asc", "fq", "a_ss:hello0", "fq", "a_ss:hello1"));
-  CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, params);
-  List<Tuple> tuples = getTuples(stream);
-  assertEquals("Multiple fq clauses should have been honored", 1, tuples.size());
-  assertEquals("should only have gotten back document 0", "0", tuples.get(0).getString("id"));
+  StreamContext streamContext = new StreamContext();
+  SolrClientCache solrClientCache = new SolrClientCache();
+  streamContext.setSolrClientCache(solrClientCache);
+
+  try {
+    ModifiableSolrParams params = new ModifiableSolrParams(mapParams("q", "*:*", "fl", "id,a_i",
+        "sort", "a_i asc", "fq", "a_ss:hello0", "fq", "a_ss:hello1"));
+    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, params);
+    stream.setStreamContext(streamContext);
+    List<Tuple> tuples = getTuples(stream);
+    assertEquals("Multiple fq clauses should have been honored", 1, tuples.size());
+    assertEquals("should only have gotten back document 0", "0", tuples.get(0).getString("id"));
+  } finally {
+    solrClientCache.close();
+  }
 }
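
testMultipleFqClauses pins down that repeated fq parameters are preserved
end to end and intersected, rather than collapsed to the last value. A
minimal sketch of how the repeated parameter is built; mapParams is the
test's own helper, and the length assertion is illustrative (it assumes the
helper preserves repeated keys, which the one-document result above relies
on):

    ModifiableSolrParams params = new ModifiableSolrParams(
        mapParams("q", "*:*", "fl", "id,a_i", "sort", "a_i asc",
                  "fq", "a_ss:hello0", "fq", "a_ss:hello1"));
    // Both filters apply: only documents matching hello0 AND hello1 survive.
    assertEquals(2, params.getParams("fq").length);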
 
 @Test
@@ -245,15 +278,20 @@ public void testRankStream() throws Exception {
       .add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1")
       .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-
-  SolrParams sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i asc");
-  CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParams);
-  RankStream rstream = new RankStream(stream, 3, new FieldComparator("a_i",ComparatorOrder.DESCENDING));
-  List<Tuple> tuples = getTuples(rstream);
-
-  assertEquals(3, tuples.size());
-  assertOrder(tuples, 4,3,2);
-
+  StreamContext streamContext = new StreamContext();
+  SolrClientCache solrClientCache = new SolrClientCache();
+  streamContext.setSolrClientCache(solrClientCache);
+  try {
+    SolrParams sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i asc");
+    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParams);
+    RankStream rstream = new RankStream(stream, 3, new FieldComparator("a_i", ComparatorOrder.DESCENDING));
+    rstream.setStreamContext(streamContext);
+    List<Tuple> tuples = getTuples(rstream);
+    assertEquals(3, tuples.size());
+    assertOrder(tuples, 4, 3, 2);
+  } finally {
+    solrClientCache.close();
+  }
 }
 
 @Test
@@ -272,22 +310,30 @@ public void testParallelRankStream() throws Exception {
       .add(id, "10", "a_s", "hello1", "a_i", "10", "a_f", "1")
       .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-  SolrParams sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i asc", "partitionKeys", "a_i");
-  CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParams);
-  RankStream rstream = new RankStream(stream, 11, new FieldComparator("a_i",ComparatorOrder.DESCENDING));
-  ParallelStream pstream = parallelStream(rstream, new FieldComparator("a_i", ComparatorOrder.DESCENDING));    
-  attachStreamFactory(pstream);
-  List<Tuple> tuples = getTuples(pstream);
+  StreamContext streamContext = new StreamContext();
+  SolrClientCache solrClientCache = new SolrClientCache();
+  streamContext.setSolrClientCache(solrClientCache);
+  try {
+    SolrParams sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i asc", "partitionKeys", "a_i");
+    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParams);
+    RankStream rstream = new RankStream(stream, 11, new FieldComparator("a_i", ComparatorOrder.DESCENDING));
+    ParallelStream pstream = parallelStream(rstream, new FieldComparator("a_i", ComparatorOrder.DESCENDING));
+    attachStreamFactory(pstream);
+    pstream.setStreamContext(streamContext);
+    List<Tuple> tuples = getTuples(pstream);
 
-  assertEquals(10, tuples.size());
-  assertOrder(tuples, 10,9,8,7,6,5,4,3,2,0);
+    assertEquals(10, tuples.size());
+    assertOrder(tuples, 10, 9, 8, 7, 6, 5, 4, 3, 2, 0);
+  } finally {
+    solrClientCache.close();
+  }
 
 }
 
 @Test
-public void testTrace() throws Exception {
+  public void testTrace() throws Exception {
 
-  new UpdateRequest()
+    new UpdateRequest()
       .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1")
       .add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2")
       .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
@@ -300,15 +346,24 @@ public void testTrace() throws Exception {
       .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
       .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-  //Test with spaces in the parameter lists.
-  SolrParams sParamsA = mapParams("q", "*:*", "fl", "id,a_s, a_i,a_f", "sort", "a_s asc,a_f   asc");
-  CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
-  stream.setTrace(true);
-  List<Tuple> tuples = getTuples(stream);
-    assertEquals(COLLECTIONORALIAS, tuples.get(0).get("_COLLECTION_"));
-    assertEquals(COLLECTIONORALIAS, tuples.get(1).get("_COLLECTION_"));
-    assertEquals(COLLECTIONORALIAS, tuples.get(2).get("_COLLECTION_"));
-    assertEquals(COLLECTIONORALIAS, tuples.get(3).get("_COLLECTION_"));
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
+    try {
+      //Test with spaces in the parameter lists.
+      SolrParams sParamsA = mapParams("q", "*:*", "fl", "id,a_s, a_i,a_f", "sort", "a_s asc,a_f   asc");
+      CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
+      stream.setTrace(true);
+      stream.setStreamContext(streamContext);
+      List<Tuple> tuples = getTuples(stream);
+      assertEquals(COLLECTIONORALIAS, tuples.get(0).get("_COLLECTION_"));
+      assertEquals(COLLECTIONORALIAS, tuples.get(1).get("_COLLECTION_"));
+      assertEquals(COLLECTIONORALIAS, tuples.get(2).get("_COLLECTION_"));
+      assertEquals(COLLECTIONORALIAS, tuples.get(3).get("_COLLECTION_"));
+    } finally {
+      solrClientCache.close();
+    }
   }
 
   @Test
@@ -327,52 +382,60 @@ public void testTrace() throws Exception {
         .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
         .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-    //Test with spaces in the parameter lists.
-    SolrParams sParamsA = mapParams("q", "*:*", "fl", "id,a_s, a_i,  a_f", "sort", "a_s asc  ,  a_f   asc");
-    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
-    ReducerStream rstream  = new ReducerStream(stream,
-                                               new FieldEqualitor("a_s"),
-                                               new GroupOperation(new FieldComparator("a_f", ComparatorOrder.ASCENDING), 5));
-
-    List<Tuple> tuples = getTuples(rstream);
-
-    assertEquals(3, tuples.size());
-
-    Tuple t0 = tuples.get(0);
-    List<Map> maps0 = t0.getMaps("group");
-    assertMaps(maps0, 0, 2, 1, 9);
-
-    Tuple t1 = tuples.get(1);
-    List<Map> maps1 = t1.getMaps("group");
-    assertMaps(maps1, 3, 5, 7, 8);
-
-    Tuple t2 = tuples.get(2);
-    List<Map> maps2 = t2.getMaps("group");
-    assertMaps(maps2, 4, 6);
-
-    //Test with spaces in the parameter lists using a comparator
-    sParamsA = mapParams("q", "*:*", "fl", "id,a_s, a_i,  a_f", "sort", "a_s asc  ,  a_f   asc");
-    stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
-    rstream = new ReducerStream(stream,
-                                new FieldComparator("a_s", ComparatorOrder.ASCENDING),
-                                new GroupOperation(new FieldComparator("a_f", ComparatorOrder.DESCENDING), 5));
-
-    tuples = getTuples(rstream);
-
-    assertEquals(3, tuples.size());
-
-    t0 = tuples.get(0);
-    maps0 = t0.getMaps("group");
-    assertMaps(maps0, 9, 1, 2, 0);
-
-    t1 = tuples.get(1);
-    maps1 = t1.getMaps("group");
-    assertMaps(maps1, 8, 7, 5, 3);
-
-    t2 = tuples.get(2);
-    maps2 = t2.getMaps("group");
-    assertMaps(maps2, 6, 4);
-
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
+    try {
+      //Test with spaces in the parameter lists.
+      SolrParams sParamsA = mapParams("q", "*:*", "fl", "id,a_s, a_i,  a_f", "sort", "a_s asc  ,  a_f   asc");
+      CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
+      ReducerStream rstream = new ReducerStream(stream,
+          new FieldEqualitor("a_s"),
+          new GroupOperation(new FieldComparator("a_f", ComparatorOrder.ASCENDING), 5));
+
+      rstream.setStreamContext(streamContext);
+      List<Tuple> tuples = getTuples(rstream);
+
+      assertEquals(3, tuples.size());
+
+      Tuple t0 = tuples.get(0);
+      List<Map> maps0 = t0.getMaps("group");
+      assertMaps(maps0, 0, 2, 1, 9);
+
+      Tuple t1 = tuples.get(1);
+      List<Map> maps1 = t1.getMaps("group");
+      assertMaps(maps1, 3, 5, 7, 8);
+
+      Tuple t2 = tuples.get(2);
+      List<Map> maps2 = t2.getMaps("group");
+      assertMaps(maps2, 4, 6);
+
+      //Test with spaces in the parameter lists using a comparator
+      sParamsA = mapParams("q", "*:*", "fl", "id,a_s, a_i,  a_f", "sort", "a_s asc  ,  a_f   asc");
+      stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
+      rstream = new ReducerStream(stream,
+          new FieldComparator("a_s", ComparatorOrder.ASCENDING),
+          new GroupOperation(new FieldComparator("a_f", ComparatorOrder.DESCENDING), 5));
+      rstream.setStreamContext(streamContext);
+      tuples = getTuples(rstream);
+
+      assertEquals(3, tuples.size());
+
+      t0 = tuples.get(0);
+      maps0 = t0.getMaps("group");
+      assertMaps(maps0, 9, 1, 2, 0);
+
+      t1 = tuples.get(1);
+      maps1 = t1.getMaps("group");
+      assertMaps(maps1, 8, 7, 5, 3);
+
+      t2 = tuples.get(2);
+      maps2 = t2.getMaps("group");
+      assertMaps(maps2, 6, 4);
+    } finally {
+      solrClientCache.close();
+    }
   }
 
   @Test
@@ -392,17 +455,24 @@ public void testTrace() throws Exception {
         .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
         .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-    //Test with spaces in the parameter lists.
-    SolrParams sParamsA = mapParams("q", "blah", "fl", "id,a_s, a_i,  a_f", "sort", "a_s asc  ,  a_f   asc");
-    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
-    ReducerStream rstream = new ReducerStream(stream,
-                                              new FieldEqualitor("a_s"),
-                                              new GroupOperation(new FieldComparator("a_f", ComparatorOrder.ASCENDING), 5));
-
-    List<Tuple> tuples = getTuples(rstream);
-
-    assertEquals(0, tuples.size());
-
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
+    try {
+      //Test with spaces in the parameter lists.
+      SolrParams sParamsA = mapParams("q", "blah", "fl", "id,a_s, a_i,  a_f", "sort", "a_s asc  ,  a_f   asc");
+      CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
+      ReducerStream rstream = new ReducerStream(stream,
+          new FieldEqualitor("a_s"),
+          new GroupOperation(new FieldComparator("a_f", ComparatorOrder.ASCENDING), 5));
+      rstream.setStreamContext(streamContext);
+      List<Tuple> tuples = getTuples(rstream);
+
+      assertEquals(0, tuples.size());
+    } finally {
+      solrClientCache.close();
+    }
   }
 
   @Test
@@ -421,56 +491,65 @@ public void testTrace() throws Exception {
         .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
         .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-    SolrParams sParamsA = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_s asc,a_f asc", "partitionKeys", "a_s");
-    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
 
-    ReducerStream rstream = new ReducerStream(stream,
-                                              new FieldEqualitor("a_s"),
-                                              new GroupOperation(new FieldComparator("a_f", ComparatorOrder.DESCENDING), 5));
-    ParallelStream pstream = parallelStream(rstream, new FieldComparator("a_s", ComparatorOrder.ASCENDING));    
-    attachStreamFactory(pstream);
-    List<Tuple> tuples = getTuples(pstream);
+    try {
+      SolrParams sParamsA = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_s asc,a_f asc", "partitionKeys", "a_s");
+      CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
 
-    assertEquals(3, tuples.size());
+      ReducerStream rstream = new ReducerStream(stream,
+          new FieldEqualitor("a_s"),
+          new GroupOperation(new FieldComparator("a_f", ComparatorOrder.DESCENDING), 5));
+      ParallelStream pstream = parallelStream(rstream, new FieldComparator("a_s", ComparatorOrder.ASCENDING));
+      attachStreamFactory(pstream);
+      pstream.setStreamContext(streamContext);
+      List<Tuple> tuples = getTuples(pstream);
 
-    Tuple t0 = tuples.get(0);
-    List<Map> maps0 = t0.getMaps("group");
-    assertMaps(maps0, 9, 1, 2, 0);
+      assertEquals(3, tuples.size());
 
-    Tuple t1 = tuples.get(1);
-    List<Map> maps1 = t1.getMaps("group");
-    assertMaps(maps1, 8, 7, 5, 3);
+      Tuple t0 = tuples.get(0);
+      List<Map> maps0 = t0.getMaps("group");
+      assertMaps(maps0, 9, 1, 2, 0);
 
-    Tuple t2 = tuples.get(2);
-    List<Map> maps2 = t2.getMaps("group");
-    assertMaps(maps2, 6, 4);
+      Tuple t1 = tuples.get(1);
+      List<Map> maps1 = t1.getMaps("group");
+      assertMaps(maps1, 8, 7, 5, 3);
 
-    //Test Descending with Ascending subsort
+      Tuple t2 = tuples.get(2);
+      List<Map> maps2 = t2.getMaps("group");
+      assertMaps(maps2, 6, 4);
 
-    sParamsA = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_s desc,a_f asc", "partitionKeys", "a_s");
-    stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
+      //Test Descending with Ascending subsort
 
-    rstream = new ReducerStream(stream,
-                                new FieldEqualitor("a_s"),
-                                new GroupOperation(new FieldComparator("a_f", ComparatorOrder.ASCENDING), 3));
-    pstream = parallelStream(rstream, new FieldComparator("a_s", ComparatorOrder.DESCENDING));
-    attachStreamFactory(pstream);
-    tuples = getTuples(pstream);
+      sParamsA = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_s desc,a_f asc", "partitionKeys", "a_s");
+      stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
 
-    assertEquals(3, tuples.size());
+      rstream = new ReducerStream(stream,
+          new FieldEqualitor("a_s"),
+          new GroupOperation(new FieldComparator("a_f", ComparatorOrder.ASCENDING), 3));
+      pstream = parallelStream(rstream, new FieldComparator("a_s", ComparatorOrder.DESCENDING));
+      attachStreamFactory(pstream);
+      pstream.setStreamContext(streamContext);
+      tuples = getTuples(pstream);
 
-    t0 = tuples.get(0);
-    maps0 = t0.getMaps("group");
-    assertMaps(maps0, 4, 6);
+      assertEquals(3, tuples.size());
 
-    t1 = tuples.get(1);
-    maps1 = t1.getMaps("group");
-    assertMaps(maps1, 3, 5, 7);
+      t0 = tuples.get(0);
+      maps0 = t0.getMaps("group");
+      assertMaps(maps0, 4, 6);
 
-    t2 = tuples.get(2);
-    maps2 = t2.getMaps("group");
-    assertMaps(maps2, 0, 2, 1);
+      t1 = tuples.get(1);
+      maps1 = t1.getMaps("group");
+      assertMaps(maps1, 3, 5, 7);
 
+      t2 = tuples.get(2);
+      maps2 = t2.getMaps("group");
+      assertMaps(maps2, 0, 2, 1);
+    } finally {
+      solrClientCache.close();
+    }
   }
 
   @Test
@@ -490,24 +569,33 @@ public void testTrace() throws Exception {
         .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
         .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
    //Test an error that originates from the /select handler
-    SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,blah", "sort", "blah asc");
-    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
-    ExceptionStream estream = new ExceptionStream(stream);
-    Tuple t = getTuple(estream);
-    assertTrue(t.EOF);
-    assertTrue(t.EXCEPTION);
-    assertTrue(t.getException().contains("sort param field can't be found: blah"));
-
-    //Test an error that comes originates from the /export handler
-    sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,score", "sort", "a_s asc", "qt", "/export");
-    stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
-    estream = new ExceptionStream(stream);
-    t = getTuple(estream);
-    assertTrue(t.EOF);
-    assertTrue(t.EXCEPTION);
-    //The /export handler will pass through a real exception.
-    assertTrue(t.getException().contains("undefined field:"));
+    try {
+      SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,blah", "sort", "blah asc");
+      CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
+      ExceptionStream estream = new ExceptionStream(stream);
+      estream.setStreamContext(streamContext);
+      Tuple t = getTuple(estream);
+      assertTrue(t.EOF);
+      assertTrue(t.EXCEPTION);
+      assertTrue(t.getException().contains("sort param field can't be found: blah"));
+
+      //Test an error that originates from the /export handler
+      sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,score", "sort", "a_s asc", "qt", "/export");
+      stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
+      estream = new ExceptionStream(stream);
+      estream.setStreamContext(streamContext);
+      t = getTuple(estream);
+      assertTrue(t.EOF);
+      assertTrue(t.EXCEPTION);
+      //The /export handler will pass through a real exception.
+      assertTrue(t.getException().contains("undefined field:"));
+    } finally {
+      solrClientCache.close();
+    }
   }
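
The error-handling hunks document ExceptionStream's contract: a failure
inside the wrapped stream is not thrown to the caller but surfaces as a
terminal tuple with both EOF and EXCEPTION set, the server-side message
being available through getException(). A condensed sketch; getTuple is the
test's own single-read helper:

    ExceptionStream estream = new ExceptionStream(stream); // wraps any stream
    estream.setStreamContext(streamContext);
    Tuple t = getTuple(estream);
    if (t.EXCEPTION) {
      // e.g. "sort param field can't be found: blah" from /select, or the
      // raw exception text that /export passes through unchanged.
      String msg = t.getException();
    }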
 
   @Test
@@ -577,48 +665,55 @@ public void testTrace() throws Exception {
         .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
         .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-    SolrParams sParamsA = mapParams("q", "*:*");
-
-    Metric[] metrics = {new SumMetric("a_i"),
-                        new SumMetric("a_f"),
-                        new MinMetric("a_i"),
-                        new MinMetric("a_f"),
-                        new MaxMetric("a_i"),
-                        new MaxMetric("a_f"),
-                        new MeanMetric("a_i"),
-                        new MeanMetric("a_f"),
-                        new CountMetric()};
-
-    StatsStream statsStream = new StatsStream(zkHost, COLLECTIONORALIAS, sParamsA, metrics);
-
-    List<Tuple> tuples = getTuples(statsStream);
-
-    assertEquals(1, tuples.size());
-
-    //Test Long and Double Sums
-
-    Tuple tuple = tuples.get(0);
-
-    Double sumi = tuple.getDouble("sum(a_i)");
-    Double sumf = tuple.getDouble("sum(a_f)");
-    Double mini = tuple.getDouble("min(a_i)");
-    Double minf = tuple.getDouble("min(a_f)");
-    Double maxi = tuple.getDouble("max(a_i)");
-    Double maxf = tuple.getDouble("max(a_f)");
-    Double avgi = tuple.getDouble("avg(a_i)");
-    Double avgf = tuple.getDouble("avg(a_f)");
-    Double count = tuple.getDouble("count(*)");
-
-    assertEquals(70, sumi.longValue());
-    assertEquals(55.0, sumf.doubleValue(), 0.01);
-    assertEquals(0.0, mini.doubleValue(), 0.01);
-    assertEquals(1.0, minf.doubleValue(), 0.01);
-    assertEquals(14.0, maxi.doubleValue(), 0.01);
-    assertEquals(10.0, maxf.doubleValue(), 0.01);
-    assertEquals(7.0, avgi.doubleValue(), .01);
-    assertEquals(5.5, avgf.doubleValue(), .001);
-    assertEquals(10, count.doubleValue(), .01);
-
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
+    try {
+      SolrParams sParamsA = mapParams("q", "*:*");
+
+      Metric[] metrics = {new SumMetric("a_i"),
+          new SumMetric("a_f"),
+          new MinMetric("a_i"),
+          new MinMetric("a_f"),
+          new MaxMetric("a_i"),
+          new MaxMetric("a_f"),
+          new MeanMetric("a_i"),
+          new MeanMetric("a_f"),
+          new CountMetric()};
+
+      StatsStream statsStream = new StatsStream(zkHost, COLLECTIONORALIAS, sParamsA, metrics);
+      statsStream.setStreamContext(streamContext);
+      List<Tuple> tuples = getTuples(statsStream);
+
+      assertEquals(1, tuples.size());
+
+      //Test Long and Double Sums
+
+      Tuple tuple = tuples.get(0);
+
+      Double sumi = tuple.getDouble("sum(a_i)");
+      Double sumf = tuple.getDouble("sum(a_f)");
+      Double mini = tuple.getDouble("min(a_i)");
+      Double minf = tuple.getDouble("min(a_f)");
+      Double maxi = tuple.getDouble("max(a_i)");
+      Double maxf = tuple.getDouble("max(a_f)");
+      Double avgi = tuple.getDouble("avg(a_i)");
+      Double avgf = tuple.getDouble("avg(a_f)");
+      Double count = tuple.getDouble("count(*)");
+
+      assertEquals(70, sumi.longValue());
+      assertEquals(55.0, sumf.doubleValue(), 0.01);
+      assertEquals(0.0, mini.doubleValue(), 0.01);
+      assertEquals(1.0, minf.doubleValue(), 0.01);
+      assertEquals(14.0, maxi.doubleValue(), 0.01);
+      assertEquals(10.0, maxf.doubleValue(), 0.01);
+      assertEquals(7.0, avgi.doubleValue(), .01);
+      assertEquals(5.5, avgf.doubleValue(), .001);
+      assertEquals(10, count.doubleValue(), .01);
+    } finally {
+      solrClientCache.close();
+    }
   }
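
StatsStream, as exercised above, runs a single aggregate query and emits one tuple whose keys are the metric identifiers ("sum(a_i)", "avg(a_i)", "count(*)" and so on). A trimmed sketch of that round trip, under the same hypothetical cluster assumptions as the earlier sketch:

    import java.io.IOException;

    import org.apache.solr.client.solrj.io.SolrClientCache;
    import org.apache.solr.client.solrj.io.Tuple;
    import org.apache.solr.client.solrj.io.stream.StatsStream;
    import org.apache.solr.client.solrj.io.stream.StreamContext;
    import org.apache.solr.client.solrj.io.stream.metrics.CountMetric;
    import org.apache.solr.client.solrj.io.stream.metrics.MeanMetric;
    import org.apache.solr.client.solrj.io.stream.metrics.Metric;
    import org.apache.solr.client.solrj.io.stream.metrics.SumMetric;
    import org.apache.solr.common.params.ModifiableSolrParams;

    public class StatsStreamSketch {
      public static void main(String[] args) throws IOException {
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("q", "*:*");

        Metric[] metrics = { new SumMetric("a_i"), new MeanMetric("a_i"), new CountMetric() };

        SolrClientCache cache = new SolrClientCache();
        StreamContext context = new StreamContext();
        context.setSolrClientCache(cache);
        try {
          StatsStream stats = new StatsStream("localhost:9983", "collection1", params, metrics);
          stats.setStreamContext(context);
          stats.open();
          try {
            Tuple t = stats.read();  // exactly one aggregate tuple precedes EOF
            System.out.println("sum=" + t.getDouble("sum(a_i)")
                + " avg=" + t.getDouble("avg(a_i)")
                + " count=" + t.getDouble("count(*)"));
          } finally {
            stats.close();
          }
        } finally {
          cache.close();
        }
      }
    }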
 
   @Test
@@ -637,344 +732,352 @@ public void testTrace() throws Exception {
         .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
         .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-    SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc");
-
-    Bucket[] buckets =  {new Bucket("a_s")};
-
-    Metric[] metrics = {new SumMetric("a_i"),
-                        new SumMetric("a_f"),
-                        new MinMetric("a_i"),
-                        new MinMetric("a_f"),
-                        new MaxMetric("a_i"),
-                        new MaxMetric("a_f"),
-                        new MeanMetric("a_i"),
-                        new MeanMetric("a_f"),
-                        new CountMetric()};
-
-    FieldComparator[] sorts = {new FieldComparator("sum(a_i)",
-                                                   ComparatorOrder.ASCENDING)};
-
-    FacetStream facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
-
-    List<Tuple> tuples = getTuples(facetStream);
-
-    assert(tuples.size() == 3);
-
-    //Test Long and Double Sums
-
-    Tuple tuple = tuples.get(0);
-    String bucket = tuple.getString("a_s");
-    Double sumi = tuple.getDouble("sum(a_i)");
-    Double sumf = tuple.getDouble("sum(a_f)");
-    Double mini = tuple.getDouble("min(a_i)");
-    Double minf = tuple.getDouble("min(a_f)");
-    Double maxi = tuple.getDouble("max(a_i)");
-    Double maxf = tuple.getDouble("max(a_f)");
-    Double avgi = tuple.getDouble("avg(a_i)");
-    Double avgf = tuple.getDouble("avg(a_f)");
-    Double count = tuple.getDouble("count(*)");
-
-    assertEquals("hello4", bucket);
-    assertEquals(15, sumi.longValue());
-    assertEquals(11.0, sumf.doubleValue(), 0.01);
-    assertEquals(4.0, mini.doubleValue(), 0.01);
-    assertEquals(4.0, minf.doubleValue(), 0.01);
-    assertEquals(11.0, maxi.doubleValue(), 0.01);
-    assertEquals(7.0, maxf.doubleValue(), 0.01);
-    assertEquals(7.5, avgi.doubleValue(), 0.01);
-    assertEquals(5.5, avgf.doubleValue(), 0.01);
-    assertEquals(2, count.doubleValue(), 0.01);
-
-    tuple = tuples.get(1);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello0", bucket);
-    assertEquals(17, sumi.doubleValue(), .01);
-    assertEquals(18, sumf.doubleValue(), .01);
-    assertEquals(0.0, mini.doubleValue(), .01);
-    assertEquals(1.0, minf.doubleValue(), .01);
-    assertEquals(14.0, maxi.doubleValue(), .01);
-    assertEquals(10.0, maxf.doubleValue(), .01);
-    assertEquals(4.25, avgi.doubleValue(), .01);
-    assertEquals(4.5, avgf.doubleValue(), .01);
-    assertEquals(4, count.doubleValue(), .01);
-
-    tuple = tuples.get(2);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello3", bucket);
-    assertEquals(38.0, sumi.doubleValue(), 0.01);
-    assertEquals(26.0, sumf.doubleValue(), 0.01);
-    assertEquals(3.0, mini.doubleValue(), 0.01);
-    assertEquals(3.0, minf.doubleValue(), 0.01);
-    assertEquals(13.0, maxi.doubleValue(), 0.01);
-    assertEquals(9.0, maxf.doubleValue(), 0.01);
-    assertEquals(9.5, avgi.doubleValue(), 0.01);
-    assertEquals(6.5, avgf.doubleValue(), 0.01);
-    assertEquals(4, count.doubleValue(), 0.01);
-
-
-    //Reverse the Sort.
-
-    sorts[0] = new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING);
-
-    facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
-
-    tuples = getTuples(facetStream);
-
-    assertEquals(3, tuples.size());
-
-    //Test Long and Double Sums
-
-    tuple = tuples.get(0);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello3", bucket);
-    assertEquals(38, sumi.doubleValue(), 0.1);
-    assertEquals(26, sumf.doubleValue(), 0.1);
-    assertEquals(3, mini.doubleValue(), 0.1);
-    assertEquals(3, minf.doubleValue(), 0.1);
-    assertEquals(13, maxi.doubleValue(), 0.1);
-    assertEquals(9, maxf.doubleValue(), 0.1);
-    assertEquals(9.5, avgi.doubleValue(), 0.1);
-    assertEquals(6.5, avgf.doubleValue(), 0.1);
-    assertEquals(4, count.doubleValue(), 0.1);
-
-    tuple = tuples.get(1);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello0", bucket);
-    assertEquals(17, sumi.doubleValue(), 0.01);
-    assertEquals(18, sumf.doubleValue(), 0.01);
-    assertEquals(0, mini.doubleValue(), 0.01);
-    assertEquals(1, minf.doubleValue(), 0.01);
-    assertEquals(14, maxi.doubleValue(), 0.01);
-    assertEquals(10, maxf.doubleValue(), 0.01);
-    assertEquals(4.25, avgi.doubleValue(), 0.01);
-    assertEquals(4.5, avgf.doubleValue(), 0.01);
-    assertEquals(4, count.doubleValue(), 0.01);
-
-    tuple = tuples.get(2);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello4", bucket);
-    assertEquals(15, sumi.longValue());
-    assertEquals(11, sumf.doubleValue(), 0.01);
-    assertEquals(4.0, mini.doubleValue(), 0.01);
-    assertEquals(4.0, minf.doubleValue(), 0.01);
-    assertEquals(11.0, maxi.doubleValue(), 0.01);
-    assertEquals(7.0, maxf.doubleValue(), 0.01);
-    assertEquals(7.5, avgi.doubleValue(), 0.01);
-    assertEquals(5.5, avgf.doubleValue(), 0.01);
-    assertEquals(2, count.doubleValue(), 0.01);
-
-
-    //Test index sort
-
-    sorts[0] = new FieldComparator("a_s", ComparatorOrder.DESCENDING);
-
-
-    facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
-
-    tuples = getTuples(facetStream);
-
-    assertEquals(3, tuples.size());
-
-
-    tuple = tuples.get(0);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-
-    assertEquals("hello4", bucket);
-    assertEquals(15, sumi.longValue());
-    assertEquals(11, sumf.doubleValue(), 0.01);
-    assertEquals(4, mini.doubleValue(), 0.01);
-    assertEquals(4, minf.doubleValue(), 0.01);
-    assertEquals(11, maxi.doubleValue(), 0.01);
-    assertEquals(7, maxf.doubleValue(), 0.01);
-    assertEquals(7.5, avgi.doubleValue(), 0.01);
-    assertEquals(5.5, avgf.doubleValue(), 0.01);
-    assertEquals(2, count.doubleValue(), 0.01);
-
-    tuple = tuples.get(1);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertTrue(bucket.equals("hello3"));
-    assertTrue(sumi.doubleValue() == 38.0D);
-    assertTrue(sumf.doubleValue() == 26.0D);
-    assertTrue(mini.doubleValue() == 3.0D);
-    assertTrue(minf.doubleValue() == 3.0D);
-    assertTrue(maxi.doubleValue() == 13.0D);
-    assertTrue(maxf.doubleValue() == 9.0D);
-    assertTrue(avgi.doubleValue() == 9.5D);
-    assertTrue(avgf.doubleValue() == 6.5D);
-    assertTrue(count.doubleValue() == 4);
-
-    tuple = tuples.get(2);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello0", bucket);
-    assertEquals(17, sumi.doubleValue(), 0.01);
-    assertEquals(18, sumf.doubleValue(), 0.01);
-    assertEquals(0, mini.doubleValue(), 0.01);
-    assertEquals(1, minf.doubleValue(), 0.01);
-    assertEquals(14, maxi.doubleValue(), 0.01);
-    assertEquals(10, maxf.doubleValue(), 0.01);
-    assertEquals(4.25, avgi.doubleValue(), 0.01);
-    assertEquals(4.5, avgf.doubleValue(), 0.01);
-    assertEquals(4, count.doubleValue(), 0.01);
-
-    //Test index sort
-
-    sorts[0] = new FieldComparator("a_s", ComparatorOrder.ASCENDING);
-
-    facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
-
-    tuples = getTuples(facetStream);
-
-    assertEquals(3, tuples.size());
-
-    tuple = tuples.get(0);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello0", bucket);
-    assertEquals(17, sumi.doubleValue(), 0.01);
-    assertEquals(18, sumf.doubleValue(), 0.01);
-    assertEquals(0, mini.doubleValue(), 0.01);
-    assertEquals(1, minf.doubleValue(), 0.01);
-    assertEquals(14, maxi.doubleValue(), 0.01);
-    assertEquals(10, maxf.doubleValue(), 0.01);
-    assertEquals(4.25, avgi.doubleValue(), 0.0001);
-    assertEquals(4.5, avgf.doubleValue(), 0.001);
-    assertEquals(4, count.doubleValue(), 0.01);
-
-    tuple = tuples.get(1);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello3", bucket);
-    assertEquals(38, sumi.doubleValue(), 0.01);
-    assertEquals(26, sumf.doubleValue(), 0.01);
-    assertEquals(3, mini.doubleValue(), 0.01);
-    assertEquals(3, minf.doubleValue(), 0.01);
-    assertEquals(13, maxi.doubleValue(), 0.01);
-    assertEquals(9, maxf.doubleValue(), 0.01);
-    assertEquals(9.5, avgi.doubleValue(), 0.01);
-    assertEquals(6.5, avgf.doubleValue(), 0.01);
-    assertEquals(4, count.doubleValue(), 0.01);
-
-    tuple = tuples.get(2);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello4", bucket);
-    assertEquals(15, sumi.longValue());
-    assertEquals(11.0, sumf.doubleValue(), 0.1);
-    assertEquals(4.0, mini.doubleValue(), 0.1);
-    assertEquals(4.0, minf.doubleValue(), 0.1);
-    assertEquals(11.0, maxi.doubleValue(), 0.1);
-    assertEquals(7.0, maxf.doubleValue(), 0.1);
-    assertEquals(7.5, avgi.doubleValue(), 0.1);
-    assertEquals(5.5, avgf.doubleValue(), 0.1);
-    assertEquals(2, count.doubleValue(), 0.1);
-
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
+    try {
+      SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc");
+
+      Bucket[] buckets = {new Bucket("a_s")};
+
+      Metric[] metrics = {new SumMetric("a_i"),
+          new SumMetric("a_f"),
+          new MinMetric("a_i"),
+          new MinMetric("a_f"),
+          new MaxMetric("a_i"),
+          new MaxMetric("a_f"),
+          new MeanMetric("a_i"),
+          new MeanMetric("a_f"),
+          new CountMetric()};
+
+      FieldComparator[] sorts = {new FieldComparator("sum(a_i)",
+          ComparatorOrder.ASCENDING)};
+
+      FacetStream facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
+      facetStream.setStreamContext(streamContext);
+
+      List<Tuple> tuples = getTuples(facetStream);
+
+      assert (tuples.size() == 3);
+
+      //Test Long and Double Sums
+
+      Tuple tuple = tuples.get(0);
+      String bucket = tuple.getString("a_s");
+      Double sumi = tuple.getDouble("sum(a_i)");
+      Double sumf = tuple.getDouble("sum(a_f)");
+      Double mini = tuple.getDouble("min(a_i)");
+      Double minf = tuple.getDouble("min(a_f)");
+      Double maxi = tuple.getDouble("max(a_i)");
+      Double maxf = tuple.getDouble("max(a_f)");
+      Double avgi = tuple.getDouble("avg(a_i)");
+      Double avgf = tuple.getDouble("avg(a_f)");
+      Double count = tuple.getDouble("count(*)");
+
+      assertEquals("hello4", bucket);
+      assertEquals(15, sumi.longValue());
+      assertEquals(11.0, sumf.doubleValue(), 0.01);
+      assertEquals(4.0, mini.doubleValue(), 0.01);
+      assertEquals(4.0, minf.doubleValue(), 0.01);
+      assertEquals(11.0, maxi.doubleValue(), 0.01);
+      assertEquals(7.0, maxf.doubleValue(), 0.01);
+      assertEquals(7.5, avgi.doubleValue(), 0.01);
+      assertEquals(5.5, avgf.doubleValue(), 0.01);
+      assertEquals(2, count.doubleValue(), 0.01);
+
+      tuple = tuples.get(1);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello0", bucket);
+      assertEquals(17, sumi.doubleValue(), .01);
+      assertEquals(18, sumf.doubleValue(), .01);
+      assertEquals(0.0, mini.doubleValue(), .01);
+      assertEquals(1.0, minf.doubleValue(), .01);
+      assertEquals(14.0, maxi.doubleValue(), .01);
+      assertEquals(10.0, maxf.doubleValue(), .01);
+      assertEquals(4.25, avgi.doubleValue(), .01);
+      assertEquals(4.5, avgf.doubleValue(), .01);
+      assertEquals(4, count.doubleValue(), .01);
+
+      tuple = tuples.get(2);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello3", bucket);
+      assertEquals(38.0, sumi.doubleValue(), 0.01);
+      assertEquals(26.0, sumf.doubleValue(), 0.01);
+      assertEquals(3.0, mini.doubleValue(), 0.01);
+      assertEquals(3.0, minf.doubleValue(), 0.01);
+      assertEquals(13.0, maxi.doubleValue(), 0.01);
+      assertEquals(9.0, maxf.doubleValue(), 0.01);
+      assertEquals(9.5, avgi.doubleValue(), 0.01);
+      assertEquals(6.5, avgf.doubleValue(), 0.01);
+      assertEquals(4, count.doubleValue(), 0.01);
+
+
+      //Reverse the Sort.
+
+      sorts[0] = new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING);
+
+      facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
+      facetStream.setStreamContext(streamContext);
+
+      tuples = getTuples(facetStream);
+
+      assertEquals(3, tuples.size());
+
+      //Test Long and Double Sums
+
+      tuple = tuples.get(0);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello3", bucket);
+      assertEquals(38, sumi.doubleValue(), 0.1);
+      assertEquals(26, sumf.doubleValue(), 0.1);
+      assertEquals(3, mini.doubleValue(), 0.1);
+      assertEquals(3, minf.doubleValue(), 0.1);
+      assertEquals(13, maxi.doubleValue(), 0.1);
+      assertEquals(9, maxf.doubleValue(), 0.1);
+      assertEquals(9.5, avgi.doubleValue(), 0.1);
+      assertEquals(6.5, avgf.doubleValue(), 0.1);
+      assertEquals(4, count.doubleValue(), 0.1);
+
+      tuple = tuples.get(1);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello0", bucket);
+      assertEquals(17, sumi.doubleValue(), 0.01);
+      assertEquals(18, sumf.doubleValue(), 0.01);
+      assertEquals(0, mini.doubleValue(), 0.01);
+      assertEquals(1, minf.doubleValue(), 0.01);
+      assertEquals(14, maxi.doubleValue(), 0.01);
+      assertEquals(10, maxf.doubleValue(), 0.01);
+      assertEquals(4.25, avgi.doubleValue(), 0.01);
+      assertEquals(4.5, avgf.doubleValue(), 0.01);
+      assertEquals(4, count.doubleValue(), 0.01);
+
+      tuple = tuples.get(2);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello4", bucket);
+      assertEquals(15, sumi.longValue());
+      assertEquals(11, sumf.doubleValue(), 0.01);
+      assertEquals(4.0, mini.doubleValue(), 0.01);
+      assertEquals(4.0, minf.doubleValue(), 0.01);
+      assertEquals(11.0, maxi.doubleValue(), 0.01);
+      assertEquals(7.0, maxf.doubleValue(), 0.01);
+      assertEquals(7.5, avgi.doubleValue(), 0.01);
+      assertEquals(5.5, avgf.doubleValue(), 0.01);
+      assertEquals(2, count.doubleValue(), 0.01);
+
+
+      //Test index sort
+
+      sorts[0] = new FieldComparator("a_s", ComparatorOrder.DESCENDING);
+
+
+      facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
+      facetStream.setStreamContext(streamContext);
+
+      tuples = getTuples(facetStream);
+
+      assertEquals(3, tuples.size());
+
+
+      tuple = tuples.get(0);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+
+      assertEquals("hello4", bucket);
+      assertEquals(15, sumi.longValue());
+      assertEquals(11, sumf.doubleValue(), 0.01);
+      assertEquals(4, mini.doubleValue(), 0.01);
+      assertEquals(4, minf.doubleValue(), 0.01);
+      assertEquals(11, maxi.doubleValue(), 0.01);
+      assertEquals(7, maxf.doubleValue(), 0.01);
+      assertEquals(7.5, avgi.doubleValue(), 0.01);
+      assertEquals(5.5, avgf.doubleValue(), 0.01);
+      assertEquals(2, count.doubleValue(), 0.01);
+
+      tuple = tuples.get(1);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+      assertTrue(bucket.equals("hello3"));
+      assertTrue(sumi.doubleValue() == 38.0D);
+      assertTrue(sumf.doubleValue() == 26.0D);
+      assertTrue(mini.doubleValue() == 3.0D);
+      assertTrue(minf.doubleValue() == 3.0D);
+      assertTrue(maxi.doubleValue() == 13.0D);
+      assertTrue(maxf.doubleValue() == 9.0D);
+      assertTrue(avgi.doubleValue() == 9.5D);
+      assertTrue(avgf.doubleValue() == 6.5D);
+      assertTrue(count.doubleValue() == 4);
+
+      tuple = tuples.get(2);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello0", bucket);
+      assertEquals(17, sumi.doubleValue(), 0.01);
+      assertEquals(18, sumf.doubleValue(), 0.01);
+      assertEquals(0, mini.doubleValue(), 0.01);
+      assertEquals(1, minf.doubleValue(), 0.01);
+      assertEquals(14, maxi.doubleValue(), 0.01);
+      assertEquals(10, maxf.doubleValue(), 0.01);
+      assertEquals(4.25, avgi.doubleValue(), 0.01);
+      assertEquals(4.5, avgf.doubleValue(), 0.01);
+      assertEquals(4, count.doubleValue(), 0.01);
+
+      //Test index sort
+
+      sorts[0] = new FieldComparator("a_s", ComparatorOrder.ASCENDING);
+
+      facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
+      facetStream.setStreamContext(streamContext);
+      tuples = getTuples(facetStream);
+
+      assertEquals(3, tuples.size());
+
+      tuple = tuples.get(0);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello0", bucket);
+      assertEquals(17, sumi.doubleValue(), 0.01);
+      assertEquals(18, sumf.doubleValue(), 0.01);
+      assertEquals(0, mini.doubleValue(), 0.01);
+      assertEquals(1, minf.doubleValue(), 0.01);
+      assertEquals(14, maxi.doubleValue(), 0.01);
+      assertEquals(10, maxf.doubleValue(), 0.01);
+      assertEquals(4.25, avgi.doubleValue(), 0.0001);
+      assertEquals(4.5, avgf.doubleValue(), 0.001);
+      assertEquals(4, count.doubleValue(), 0.01);
+
+      tuple = tuples.get(1);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello3", bucket);
+      assertEquals(38, sumi.doubleValue(), 0.01);
+      assertEquals(26, sumf.doubleValue(), 0.01);
+      assertEquals(3, mini.doubleValue(), 0.01);
+      assertEquals(3, minf.doubleValue(), 0.01);
+      assertEquals(13, maxi.doubleValue(), 0.01);
+      assertEquals(9, maxf.doubleValue(), 0.01);
+      assertEquals(9.5, avgi.doubleValue(), 0.01);
+      assertEquals(6.5, avgf.doubleValue(), 0.01);
+      assertEquals(4, count.doubleValue(), 0.01);
+
+      tuple = tuples.get(2);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello4", bucket);
+      assertEquals(15, sumi.longValue());
+      assertEquals(11.0, sumf.doubleValue(), 0.1);
+      assertEquals(4.0, mini.doubleValue(), 0.1);
+      assertEquals(4.0, minf.doubleValue(), 0.1);
+      assertEquals(11.0, maxi.doubleValue(), 0.1);
+      assertEquals(7.0, maxf.doubleValue(), 0.1);
+      assertEquals(7.5, avgi.doubleValue(), 0.1);
+      assertEquals(5.5, avgf.doubleValue(), 0.1);
+      assertEquals(2, count.doubleValue(), 0.1);
+    } finally {
+      solrClientCache.close();
+    }
   }
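
One detail worth noting in this hunk: the context is attached per stream instance, so each FacetStream constructed after mutating sorts[0] must get its own setStreamContext call before getTuples opens it. Reduced to a single round trip, again with hypothetical zkHost and collection values, the facet wiring looks like this sketch:

    import java.io.IOException;

    import org.apache.solr.client.solrj.io.SolrClientCache;
    import org.apache.solr.client.solrj.io.Tuple;
    import org.apache.solr.client.solrj.io.comp.ComparatorOrder;
    import org.apache.solr.client.solrj.io.comp.FieldComparator;
    import org.apache.solr.client.solrj.io.stream.FacetStream;
    import org.apache.solr.client.solrj.io.stream.StreamContext;
    import org.apache.solr.client.solrj.io.stream.metrics.Bucket;
    import org.apache.solr.client.solrj.io.stream.metrics.CountMetric;
    import org.apache.solr.client.solrj.io.stream.metrics.Metric;
    import org.apache.solr.client.solrj.io.stream.metrics.SumMetric;
    import org.apache.solr.common.params.ModifiableSolrParams;

    public class FacetStreamSketch {
      public static void main(String[] args) throws IOException {
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("q", "*:*");

        Bucket[] buckets = { new Bucket("a_s") };
        Metric[] metrics = { new SumMetric("a_i"), new CountMetric() };
        FieldComparator[] sorts = { new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING) };

        SolrClientCache cache = new SolrClientCache();
        StreamContext context = new StreamContext();
        context.setSolrClientCache(cache);
        try {
          FacetStream facets = new FacetStream("localhost:9983", "collection1", params,
              buckets, metrics, sorts, 100);    // 100 = bucket size limit
          facets.setStreamContext(context);     // required for each new instance
          facets.open();
          try {
            for (Tuple t = facets.read(); !t.EOF; t = facets.read()) {
              System.out.println(t.getString("a_s") + " -> " + t.getDouble("sum(a_i)"));
            }
          } finally {
            facets.close();
          }
        } finally {
          cache.close();
        }
      }
    }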
 
 
@@ -1042,7 +1145,11 @@ public void testTrace() throws Exception {
     List<String> selectOrder = ("asc".equals(sortDir)) ? Arrays.asList(ascOrder) : Arrays.asList(descOrder);
     List<String> selectOrderBool = ("asc".equals(sortDir)) ? Arrays.asList(ascOrderBool) : Arrays.asList(descOrderBool);
     SolrParams exportParams = mapParams("q", "*:*", "qt", "/export", "fl", "id," + field, "sort", field + " " + sortDir + ",id asc");
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
     try (CloudSolrStream solrStream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, exportParams)) {
+      solrStream.setStreamContext(streamContext);
       List<Tuple> tuples = getTuples(solrStream);
       assertEquals("There should be exactly 32 responses returned", 32, tuples.size());
       // Since the getTuples method doesn't return the EOF tuple, these two entries should be the same size.
@@ -1053,6 +1160,8 @@ public void testTrace() throws Exception {
                 "' RESTORE GETTING selectOrder from select statement after LUCENE-7548",
             tuples.get(idx).getString("id"), (field.startsWith("b_") ? selectOrderBool.get(idx) : selectOrder.get(idx)));
       }
+    } finally {
+      solrClientCache.close();
     }
   }
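
These export-order hunks use a slightly different shape: the stream itself is a try-with-resources resource (TupleStream is Closeable), while the cache keeps its own finally, so the stream closes before the pooled clients are torn down. A minimal sketch under the same hypothetical cluster assumptions:

    import java.io.IOException;

    import org.apache.solr.client.solrj.io.SolrClientCache;
    import org.apache.solr.client.solrj.io.Tuple;
    import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
    import org.apache.solr.client.solrj.io.stream.StreamContext;
    import org.apache.solr.common.params.ModifiableSolrParams;

    public class ExportStreamSketch {
      public static void main(String[] args) throws IOException {
        ModifiableSolrParams exportParams = new ModifiableSolrParams();
        exportParams.set("q", "*:*");
        exportParams.set("qt", "/export");          // /export requires docValues on fl/sort fields
        exportParams.set("fl", "id,a_i");
        exportParams.set("sort", "a_i asc,id asc");

        SolrClientCache cache = new SolrClientCache();
        StreamContext context = new StreamContext();
        context.setSolrClientCache(cache);
        try (CloudSolrStream stream = new CloudSolrStream("localhost:9983", "collection1", exportParams)) {
          stream.setStreamContext(context);
          stream.open();
          for (Tuple t = stream.read(); !t.EOF; t = stream.read()) {
            System.out.println(t.getString("id"));
          }
        } finally {
          cache.close();  // after the stream itself has closed
        }
      }
    }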
 
@@ -1081,7 +1190,12 @@ public void testTrace() throws Exception {
     }
     SolrParams sParams = mapParams("q", "*:*", "qt", "/export", "fl", fl.toString(), "sort", "id asc");
 
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
     try (CloudSolrStream solrStream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParams)) {
+      solrStream.setStreamContext(streamContext);
       List<Tuple> tuples = getTuples(solrStream);
       assertEquals("There should be exactly 32 responses returned", 32, tuples.size());
 
@@ -1097,6 +1211,8 @@ public void testTrace() throws Exception {
           }
         }
       }
+    } finally {
+      solrClientCache.close();
     }
   }
 
@@ -1229,173 +1345,181 @@ public void testTrace() throws Exception {
         .add(id, "9", "level1_s", "hello0", "level2_s", "b", "a_i", "14", "a_f", "10")
         .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-    SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_i,a_f");
-
-    Bucket[] buckets =  {new Bucket("level1_s"), new Bucket("level2_s")};
-
-    Metric[] metrics = {new SumMetric("a_i"),
-                        new CountMetric()};
-
-    FieldComparator[] sorts = {new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING), new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING)};
-
-    FacetStream facetStream = new FacetStream(
-        zkHost,
-        COLLECTIONORALIAS,
-        sParamsA,
-        buckets,
-        metrics,
-        sorts,
-        100);
-
-    List<Tuple> tuples = getTuples(facetStream);
-    assertEquals(6, tuples.size());
-
-    Tuple tuple = tuples.get(0);
-    String bucket1 = tuple.getString("level1_s");
-    String bucket2 = tuple.getString("level2_s");
-    Double sumi = tuple.getDouble("sum(a_i)");
-    Double count = tuple.getDouble("count(*)");
-
-    assertEquals("hello3", bucket1);
-    assertEquals("b", bucket2);
-    assertEquals(35, sumi.longValue());
-    assertEquals(3, count, 0.1);
-
-    tuple = tuples.get(1);
-    bucket1 = tuple.getString("level1_s");
-    bucket2 = tuple.getString("level2_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello0", bucket1);
-    assertEquals("b", bucket2);
-    assertEquals(15, sumi.longValue());
-    assertEquals(2, count, 0.1);
-
-    tuple = tuples.get(2);
-    bucket1 = tuple.getString("level1_s");
-    bucket2 = tuple.getString("level2_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello4", bucket1);
-    assertEquals("b", bucket2);
-    assertEquals(11, sumi.longValue());
-    assertEquals(1, count.doubleValue(), 0.1);
-
-    tuple = tuples.get(3);
-    bucket1 = tuple.getString("level1_s");
-    bucket2 = tuple.getString("level2_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello4", bucket1);
-    assertEquals("a", bucket2);
-    assertEquals(4, sumi.longValue());
-    assertEquals(1, count.doubleValue(), 0.1);
-
-    tuple = tuples.get(4);
-    bucket1 = tuple.getString("level1_s");
-    bucket2 = tuple.getString("level2_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello3", bucket1);
-    assertEquals("a", bucket2);
-    assertEquals(3, sumi.longValue());
-    assertEquals(1, count.doubleValue(), 0.1);
-
-    tuple = tuples.get(5);
-    bucket1 = tuple.getString("level1_s");
-    bucket2 = tuple.getString("level2_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello0", bucket1);
-    assertEquals("a", bucket2);
-    assertEquals(2, sumi.longValue());
-    assertEquals(2, count.doubleValue(), 0.1);
-
-    sorts[0] =  new FieldComparator("level1_s", ComparatorOrder.DESCENDING );
-    sorts[1] =  new FieldComparator("level2_s", ComparatorOrder.DESCENDING );
-    facetStream = new FacetStream(
-        zkHost,
-        COLLECTIONORALIAS,
-        sParamsA,
-        buckets,
-        metrics,
-        sorts,
-        100);
-
-    tuples = getTuples(facetStream);
-    assertEquals(6, tuples.size());
-
-    tuple = tuples.get(0);
-    bucket1 = tuple.getString("level1_s");
-    bucket2 = tuple.getString("level2_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello4", bucket1);
-    assertEquals("b", bucket2);
-    assertEquals(11, sumi.longValue());
-    assertEquals(1, count, 0.1);
-
-    tuple = tuples.get(1);
-    bucket1 = tuple.getString("level1_s");
-    bucket2 = tuple.getString("level2_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello4", bucket1);
-    assertEquals("a", bucket2);
-    assertEquals(4, sumi.longValue());
-    assertEquals(1, count.doubleValue(), 0.1);
-
-    tuple = tuples.get(2);
-    bucket1 = tuple.getString("level1_s");
-    bucket2 = tuple.getString("level2_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello3", bucket1);
-    assertEquals("b", bucket2);
-    assertEquals(35, sumi.longValue());
-    assertEquals(3, count.doubleValue(), 0.1);
-
-    tuple = tuples.get(3);
-    bucket1 = tuple.getString("level1_s");
-    bucket2 = tuple.getString("level2_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello3", bucket1);
-    assertEquals("a", bucket2);
-    assertEquals(3, sumi.longValue());
-    assertEquals(1, count.doubleValue(), 0.1);
-
-    tuple = tuples.get(4);
-    bucket1 = tuple.getString("level1_s");
-    bucket2 = tuple.getString("level2_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello0", bucket1);
-    assertEquals("b", bucket2);
-    assertEquals(15, sumi.longValue());
-    assertEquals(2, count.doubleValue(), 0.1);
-
-    tuple = tuples.get(5);
-    bucket1 = tuple.getString("level1_s");
-    bucket2 = tuple.getString("level2_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello0", bucket1);
-    assertEquals("a", bucket2);
-    assertEquals(2, sumi.longValue());
-    assertEquals(2, count.doubleValue(), 0.1);
-
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
+    try {
+
+      SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_i,a_f");
+
+      Bucket[] buckets = {new Bucket("level1_s"), new Bucket("level2_s")};
+
+      Metric[] metrics = {new SumMetric("a_i"),
+          new CountMetric()};
+
+      FieldComparator[] sorts = {new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING), new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING)};
+
+      FacetStream facetStream = new FacetStream(
+          zkHost,
+          COLLECTIONORALIAS,
+          sParamsA,
+          buckets,
+          metrics,
+          sorts,
+          100);
+      facetStream.setStreamContext(streamContext);
+      List<Tuple> tuples = getTuples(facetStream);
+      assertEquals(6, tuples.size());
+
+      Tuple tuple = tuples.get(0);
+      String bucket1 = tuple.getString("level1_s");
+      String bucket2 = tuple.getString("level2_s");
+      Double sumi = tuple.getDouble("sum(a_i)");
+      Double count = tuple.getDouble("count(*)");
+
+      assertEquals("hello3", bucket1);
+      assertEquals("b", bucket2);
+      assertEquals(35, sumi.longValue());
+      assertEquals(3, count, 0.1);
+
+      tuple = tuples.get(1);
+      bucket1 = tuple.getString("level1_s");
+      bucket2 = tuple.getString("level2_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello0", bucket1);
+      assertEquals("b", bucket2);
+      assertEquals(15, sumi.longValue());
+      assertEquals(2, count, 0.1);
+
+      tuple = tuples.get(2);
+      bucket1 = tuple.getString("level1_s");
+      bucket2 = tuple.getString("level2_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello4", bucket1);
+      assertEquals("b", bucket2);
+      assertEquals(11, sumi.longValue());
+      assertEquals(1, count.doubleValue(), 0.1);
+
+      tuple = tuples.get(3);
+      bucket1 = tuple.getString("level1_s");
+      bucket2 = tuple.getString("level2_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello4", bucket1);
+      assertEquals("a", bucket2);
+      assertEquals(4, sumi.longValue());
+      assertEquals(1, count.doubleValue(), 0.1);
+
+      tuple = tuples.get(4);
+      bucket1 = tuple.getString("level1_s");
+      bucket2 = tuple.getString("level2_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello3", bucket1);
+      assertEquals("a", bucket2);
+      assertEquals(3, sumi.longValue());
+      assertEquals(1, count.doubleValue(), 0.1);
+
+      tuple = tuples.get(5);
+      bucket1 = tuple.getString("level1_s");
+      bucket2 = tuple.getString("level2_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello0", bucket1);
+      assertEquals("a", bucket2);
+      assertEquals(2, sumi.longValue());
+      assertEquals(2, count.doubleValue(), 0.1);
+
+      sorts[0] = new FieldComparator("level1_s", ComparatorOrder.DESCENDING);
+      sorts[1] = new FieldComparator("level2_s", ComparatorOrder.DESCENDING);
+      facetStream = new FacetStream(
+          zkHost,
+          COLLECTIONORALIAS,
+          sParamsA,
+          buckets,
+          metrics,
+          sorts,
+          100);
+      facetStream.setStreamContext(streamContext);
+      tuples = getTuples(facetStream);
+      assertEquals(6, tuples.size());
+
+      tuple = tuples.get(0);
+      bucket1 = tuple.getString("level1_s");
+      bucket2 = tuple.getString("level2_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello4", bucket1);
+      assertEquals("b", bucket2);
+      assertEquals(11, sumi.longValue());
+      assertEquals(1, count, 0.1);
+
+      tuple = tuples.get(1);
+      bucket1 = tuple.getString("level1_s");
+      bucket2 = tuple.getString("level2_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello4", bucket1);
+      assertEquals("a", bucket2);
+      assertEquals(4, sumi.longValue());
+      assertEquals(1, count.doubleValue(), 0.1);
+
+      tuple = tuples.get(2);
+      bucket1 = tuple.getString("level1_s");
+      bucket2 = tuple.getString("level2_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello3", bucket1);
+      assertEquals("b", bucket2);
+      assertEquals(35, sumi.longValue());
+      assertEquals(3, count.doubleValue(), 0.1);
+
+      tuple = tuples.get(3);
+      bucket1 = tuple.getString("level1_s");
+      bucket2 = tuple.getString("level2_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello3", bucket1);
+      assertEquals("a", bucket2);
+      assertEquals(3, sumi.longValue());
+      assertEquals(1, count.doubleValue(), 0.1);
+
+      tuple = tuples.get(4);
+      bucket1 = tuple.getString("level1_s");
+      bucket2 = tuple.getString("level2_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello0", bucket1);
+      assertEquals("b", bucket2);
+      assertEquals(15, sumi.longValue());
+      assertEquals(2, count.doubleValue(), 0.1);
+
+      tuple = tuples.get(5);
+      bucket1 = tuple.getString("level1_s");
+      bucket2 = tuple.getString("level2_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello0", bucket1);
+      assertEquals("a", bucket2);
+      assertEquals(2, sumi.longValue());
+      assertEquals(2, count.doubleValue(), 0.1);
+    } finally {
+      solrClientCache.close();
+    }
   }
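
The nested variant above only changes the inputs: one Bucket per facet dimension and one FieldComparator per bucket level. Continuing from the FacetStream sketch earlier (same imports, params, and context; the wiring below is a sketch, with the test's field names):

    // Two bucket levels; each level gets its own comparator.
    Bucket[] buckets = { new Bucket("level1_s"), new Bucket("level2_s") };
    Metric[] metrics = { new SumMetric("a_i"), new CountMetric() };
    FieldComparator[] sorts = {
        new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING),  // orders the level1_s buckets
        new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING)   // orders level2_s buckets inside each
    };
    FacetStream nested = new FacetStream("localhost:9983", "collection1", params,
        buckets, metrics, sorts, 100);
    nested.setStreamContext(context);  // again: per instance, before open()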
 
   @Test
@@ -1413,166 +1537,174 @@ public void testTrace() throws Exception {
         .add(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9")
         .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
         .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
-
-    SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc");
-    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
-
-    Bucket[] buckets =  {new Bucket("a_s")};
-
-    Metric[] metrics = {new SumMetric("a_i"),
-                        new SumMetric("a_f"),
-                        new MinMetric("a_i"),
-                        new MinMetric("a_f"),
-                        new MaxMetric("a_i"),
-                        new MaxMetric("a_f"),
-                        new MeanMetric("a_i"),
-                        new MeanMetric("a_f"),
-                        new CountMetric()};
-
-    RollupStream rollupStream = new RollupStream(stream, buckets, metrics);
-    List<Tuple> tuples = getTuples(rollupStream);
-
-    assert(tuples.size() == 3);
-
-    //Test Long and Double Sums
-
-    Tuple tuple = tuples.get(0);
-    String bucket = tuple.getString("a_s");
-    Double sumi = tuple.getDouble("sum(a_i)");
-    Double sumf = tuple.getDouble("sum(a_f)");
-    Double mini = tuple.getDouble("min(a_i)");
-    Double minf = tuple.getDouble("min(a_f)");
-    Double maxi = tuple.getDouble("max(a_i)");
-    Double maxf = tuple.getDouble("max(a_f)");
-    Double avgi = tuple.getDouble("avg(a_i)");
-    Double avgf = tuple.getDouble("avg(a_f)");
-    Double count = tuple.getDouble("count(*)");
-
-
-    assertEquals("hello0", bucket);
-    assertEquals(17, sumi.doubleValue(), 0.001);
-    assertEquals(18, sumf.doubleValue(), 0.001);
-    assertEquals(0, mini.doubleValue(), 0.001);
-    assertEquals(1, minf.doubleValue(), 0.001);
-    assertEquals(14, maxi.doubleValue(), 0.001);
-    assertEquals(10, maxf.doubleValue(), 0.001);
-    assertEquals(4.25, avgi.doubleValue(), 0.001);
-    assertEquals(4.5, avgf.doubleValue(), 0.001);
-    assertEquals(4, count.doubleValue(), 0.001);
-
-
-    tuple = tuples.get(1);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello3", bucket);
-    assertEquals(38, sumi.doubleValue(), 0.001);
-    assertEquals(26, sumf.doubleValue(), 0.001);
-    assertEquals(3, mini.doubleValue(), 0.001);
-    assertEquals(3, minf.doubleValue(), 0.001);
-    assertEquals(13, maxi.doubleValue(), 0.001);
-    assertEquals(9, maxf.doubleValue(), 0.001);
-    assertEquals(9.5, avgi.doubleValue(), 0.001);
-    assertEquals(6.5, avgf.doubleValue(), 0.001);
-    assertEquals(4, count.doubleValue(), 0.001);
-
-
-    tuple = tuples.get(2);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello4", bucket);
-    assertEquals(15, sumi.longValue());
-    assertEquals(11, sumf.doubleValue(), 0.01);
-    assertEquals(4, mini.doubleValue(), 0.01);
-    assertEquals(4, minf.doubleValue(), 0.01);
-    assertEquals(11, maxi.doubleValue(), 0.01);
-    assertEquals(7, maxf.doubleValue(), 0.01);
-    assertEquals(7.5, avgi.doubleValue(), 0.01);
-    assertEquals(5.5, avgf.doubleValue(), 0.01);
-    assertEquals(2, count.doubleValue(), 0.01);
-
-    // Test with null metrics
-    rollupStream = new RollupStream(stream, buckets, metrics);
-    tuples = getTuples(rollupStream);
-
-    assert(tuples.size() == 3);
-    tuple = tuples.get(0);
-    bucket = tuple.getString("a_s");
-    assertTrue(bucket.equals("hello0"));
-
-    tuple = tuples.get(1);
-    bucket = tuple.getString("a_s");
-    assertTrue(bucket.equals("hello3"));
-
-    tuple = tuples.get(2);
-    bucket = tuple.getString("a_s");
-    assertTrue(bucket.equals("hello4"));
-
-
-    //Test with a null value in the grouping field
-    new UpdateRequest()
-        .add(id, "12", "a_s", null, "a_i", "14", "a_f", "10")
-        .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
-
-    sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc", "qt", "/export");
-    stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
-
-    Bucket[] buckets1 =  {new Bucket("a_s")};
-
-    Metric[] metrics1 = {new SumMetric("a_i"),
-        new SumMetric("a_f"),
-        new MinMetric("a_i"),
-        new MinMetric("a_f"),
-        new MaxMetric("a_i"),
-        new MaxMetric("a_f"),
-        new MeanMetric("a_i"),
-        new MeanMetric("a_f"),
-        new CountMetric()};
-
-    rollupStream = new RollupStream(stream, buckets1, metrics1);
-    tuples = getTuples(rollupStream);
-    //Check that we've got the extra NULL bucket
-    assertEquals(4, tuples.size());
-    tuple = tuples.get(0);
-    assertEquals("NULL", tuple.getString("a_s"));
-
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals(14, sumi.doubleValue(), 0.01);
-    assertEquals(10, sumf.doubleValue(), 0.01);
-    assertEquals(14, mini.doubleValue(), 0.01);
-    assertEquals(10, minf.doubleValue(), 0.01);
-    assertEquals(14, maxi.doubleValue(), 0.01);
-    assertEquals(10, maxf.doubleValue(), 0.01);
-    assertEquals(14, avgi.doubleValue(), 0.01);
-    assertEquals(10, avgf.doubleValue(), 0.01);
-    assertEquals(1, count.doubleValue(), 0.01);
-
+    StreamContext streamContext = new StreamContext();
+    SolrClientCache solrClientCache = new SolrClientCache();
+    streamContext.setSolrClientCache(solrClientCache);
+
+    try {
+      SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc");
+      CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
+
+      Bucket[] buckets = {new Bucket("a_s")};
+
+      Metric[] metrics = {new SumMetric("a_i"),
+          new SumMetric("a_f"),
+          new MinMetric("a_i"),
+          new MinMetric("a_f"),
+          new MaxMetric("a_i"),
+          new MaxMetric("a_f"),
+          new MeanMetric("a_i"),
+          new MeanMetric("a_f"),
+          new CountMetric()};
+
+      RollupStream rollupStream = new RollupStream(stream, buckets, metrics);
+      rollupStream.setStreamContext(streamContext);
+      List<Tuple> tuples = getTuples(rollupStream);
+
+      assert (tuples.size() == 3);
+
+      //Test Long and Double Sums
+
+      Tuple tuple = tuples.get(0);
+      String bucket = tuple.getString("a_s");
+      Double sumi = tuple.getDouble("sum(a_i)");
+      Double sumf = tuple.getDouble("sum(a_f)");
+      Double mini = tuple.getDouble("min(a_i)");
+      Double minf = tuple.getDouble("min(a_f)");
+      Double maxi = tuple.getDouble("max(a_i)");
+      Double maxf = tuple.getDouble("max(a_f)");
+      Double avgi = tuple.getDouble("avg(a_i)");
+      Double avgf = tuple.getDouble("avg(a_f)");
+      Double count = tuple.getDouble("count(*)");
+
+
+      assertEquals("hello0", bucket);
+      assertEquals(17, sumi.doubleValue(), 0.001);
+      assertEquals(18, sumf.doubleValue(), 0.001);
+      assertEquals(0, mini.doubleValue(), 0.001);
+      assertEquals(1, minf.doubleValue(), 0.001);
+      assertEquals(14, maxi.doubleValue(), 0.001);
+      assertEquals(10, maxf.doubleValue(), 0.001);
+      assertEquals(4.25, avgi.doubleValue(), 0.001);
+      assertEquals(4.5, avgf.doubleValue(), 0.001);
+      assertEquals(4, count.doubleValue(), 0.001);
+
+
+      tuple = tuples.get(1);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello3", bucket);
+      assertEquals(38, sumi.doubleValue(), 0.001);
+      assertEquals(26, sumf.doubleValue(), 0.001);
+      assertEquals(3, mini.doubleValue(), 0.001);
+      assertEquals(3, minf.doubleValue(), 0.001);
+      assertEquals(13, maxi.doubleValue(), 0.001);
+      assertEquals(9, maxf.doubleValue(), 0.001);
+      assertEquals(9.5, avgi.doubleValue(), 0.001);
+      assertEquals(6.5, avgf.doubleValue(), 0.001);
+      assertEquals(4, count.doubleValue(), 0.001);
+
+
+      tuple = tuples.get(2);
+      bucket = tuple.getString("a_s");
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals("hello4", bucket);
+      assertEquals(15, sumi.longValue());
+      assertEquals(11, sumf.doubleValue(), 0.01);
+      assertEquals(4, mini.doubleValue(), 0.01);
+      assertEquals(4, minf.doubleValue(), 0.01);
+      assertEquals(11, maxi.doubleValue(), 0.01);
+      assertEquals(7, maxf.doubleValue(), 0.01);
+      assertEquals(7.5, avgi.doubleValue(), 0.01);
+      assertEquals(5.5, avgf.doubleValue(), 0.01);
+      assertEquals(2, count.doubleValue(), 0.01);
+
+      // Test with null metrics
+      rollupStream = new RollupStream(stream, buckets, metrics);
+      rollupStream.setStreamContext(streamContext);
+      tuples = getTuples(rollupStream);
+
+      assert (tuples.size() == 3);
+      tuple = tuples.get(0);
+      bucket = tuple.getString("a_s");
+      assertTrue(bucket.equals("hello0"));
+
+      tuple = tuples.get(1);
+      bucket = tuple.getString("a_s");
+      assertTrue(bucket.equals("hello3"));
+
+      tuple = tuples.get(2);
+      bucket = tuple.getString("a_s");
+      assertTrue(bucket.equals("hello4"));
+
+
+      //Test with a null value in the grouping field
+      new UpdateRequest()
+          .add(id, "12", "a_s", null, "a_i", "14", "a_f", "10")
+          .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
+
+      sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc", "qt", "/export");
+      stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
+      Bucket[] buckets1 = {new Bucket("a_s")};
+
+      Metric[] metrics1 = {new SumMetric("a_i"),
+          new SumMetric("a_f"),
+          new MinMetric("a_i"),
+          new MinMetric("a_f"),
+          new MaxMetric("a_i"),
+          new MaxMetric("a_f"),
+          new MeanMetric("a_i"),
+          new MeanMetric("a_f"),
+          new CountMetric()};
+
+      rollupStream = new RollupStream(stream, buckets1, metrics1);
+      rollupStream.setStreamContext(streamContext);
+      tuples = getTuples(rollupStream);
+      //Check that we've got the extra NULL bucket
+      assertEquals(4, tuples.size());
+      tuple = tuples.get(0);
+      assertEquals("NULL", tuple.getString("a_s"));
+
+      sumi = tuple.getDouble("sum(a_i)");
+      sumf = tuple.getDouble("sum(a_f)");
+      mini = tuple.getDouble("min(a_i)");
+      minf = tuple.getDouble("min(a_f)");
+      maxi = tuple.getDouble("max(a_i)");
+      maxf = tuple.getDouble("max(a_f)");
+      avgi = tuple.getDouble("avg(a_i)");
+      avgf = tuple.getDouble("avg(a_f)");
+      count = tuple.getDouble("count(*)");
+
+      assertEquals(14, sumi.doubleValue(), 0.01);
+      assertEquals(10, sumf.doubleValue(), 0.01);
+      assertEquals(14, mini.doubleValue(), 0.01);
+      assertEquals(10, minf.doubleValue(), 0.01);
+      assertEquals(14, maxi.doubleValue(), 0.01);
+      assertEquals(10, maxf.doubleValue(), 0.01);
+      assertEquals(14, avgi.doubleValue(), 0.01);
+      assertEquals(10, avgf.doubleValue(), 0.01);
+      assertEquals(1, count.doubleValue(), 0.01);
+    } finally {
+      solrClientCache.close();
+    }
   }
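
RollupStream, unlike FacetStream, aggregates on the client side over an already-sorted stream, which is why the test sorts the underlying CloudSolrStream on the bucket field and reads it through /export once the full result set matters. A self-contained sketch of that wiring, hypothetical cluster values as before:

    import java.io.IOException;

    import org.apache.solr.client.solrj.io.SolrClientCache;
    import org.apache.solr.client.solrj.io.Tuple;
    import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
    import org.apache.solr.client.solrj.io.stream.RollupStream;
    import org.apache.solr.client.solrj.io.stream.StreamContext;
    import org.apache.solr.client.solrj.io.stream.metrics.Bucket;
    import org.apache.solr.client.solrj.io.stream.metrics.CountMetric;
    import org.apache.solr.client.solrj.io.stream.metrics.Metric;
    import org.apache.solr.client.solrj.io.stream.metrics.SumMetric;
    import org.apache.solr.common.params.ModifiableSolrParams;

    public class RollupStreamSketch {
      public static void main(String[] args) throws IOException {
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("q", "*:*");
        params.set("fl", "a_s,a_i");
        params.set("sort", "a_s asc");   // input must be sorted by the bucket field
        params.set("qt", "/export");     // stream the full result set

        Bucket[] buckets = { new Bucket("a_s") };
        Metric[] metrics = { new SumMetric("a_i"), new CountMetric() };

        SolrClientCache cache = new SolrClientCache();
        StreamContext context = new StreamContext();
        context.setSolrClientCache(cache);
        try {
          CloudSolrStream source = new CloudSolrStream("localhost:9983", "collection1", params);
          RollupStream rollup = new RollupStream(source, buckets, metrics);
          rollup.setStreamContext(context);  // propagates to the wrapped source stream
          rollup.open();
          try {
            for (Tuple t = rollup.read(); !t.EOF; t = rollup.read()) {
              System.out.println(t.getString("a_s") + " count=" + t.getDouble("count(*)"));
            }
          } finally {
            rollup.close();
          }
        } finally {
          cache.close();
        }
      }
    }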
 
   @Test
@@ -1583,66 +1715,71 @@ public void testTrace() throws Exception {
     SolrClientCache cache = new SolrClientCache();
     context.setSolrClientCache(cache);
 
-    SolrParams sParams = mapParams("q", "a_s:hello0", "rows", "500", "fl", "id");
+    try {
+      SolrParams sParams = mapParams("q", "a_s:hello0", "rows", "500", "fl", "id");
 
-    TopicStream topicStream = new TopicStream(zkHost,
-        COLLECTIONORALIAS,
-        COLLECTIONORALIAS,
-                                              "50000000",
-                                              -1,
-                                              1000000, sParams);
+      TopicStream topicStream = new TopicStream(zkHost,
+          COLLECTIONORALIAS,
+          COLLECTIONORALIAS,
+          "50000000",
+          -1,
+          1000000, sParams);
 
-    DaemonStream daemonStream = new DaemonStream(topicStream, "daemon1", 1000, 500);
-    daemonStream.setStreamContext(context);
+      DaemonStream daemonStream = new DaemonStream(topicStream, "daemon1", 1000, 500);
+      daemonStream.setStreamContext(context);
 
-    daemonStream.open();
+      daemonStream.open();
 
-    // Wait for the checkpoint
-    JettySolrRunner jetty = cluster.getJettySolrRunners().get(0);
+      // Wait for the checkpoint
+      JettySolrRunner jetty = cluster.getJettySolrRunners().get(0);
 
 
-    SolrParams sParams1 = mapParams("qt", "/get", "ids", "50000000", "fl", "id");
-    int count = 0;
-    while(count == 0) {
-      SolrStream solrStream = new SolrStream(jetty.getBaseUrl().toString() + "/" + COLLECTIONORALIAS, sParams1);
-      List<Tuple> tuples = getTuples(solrStream);
-      count = tuples.size();
-      if(count > 0) {
-        Tuple t = tuples.get(0);
-        assertTrue(t.getLong("id") == 50000000);
-      } else {
-        System.out.println("###### Waiting for checkpoint #######:" + count);
+      SolrParams sParams1 = mapParams("qt", "/get", "ids", "50000000", "fl", "id");
+      int count = 0;
+      while (count == 0) {
+        SolrStream solrStream = new SolrStream(jetty.getBaseUrl().toString() + "/" + COLLECTIONORALIAS, sParams1);
+        solrStream.setStreamContext(context);
+        List<Tuple> tuples = getTuples(solrStream);
+        count = tuples.size();
+        if (count > 0) {
+          Tuple t = tuples.get(0);
+          assertTrue(t.getLong("id") == 50000000);
+        } else {
+          System.out.println("###### Waiting for checkpoint #######:" + count);
+        }
       }
-    }
 
-    new UpdateRequest()
-        .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1")
-        .add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2")
-        .add(id, "3", "a_s", "hello0", "a_i", "3", "a_f", "3")
-        .add(id, "4", "a_s", "hello0", "a_i", "4", "a_f", "4")
-        .add(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5")
-        .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
+      new UpdateRequest()
+          .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1")
+          .add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2")
+          .add(id, "3", "a_s", "hello0", "a_i", "3", "a_f", "3")
+          .add(id, "4", "a_s", "hello0", "a_i", "4", "a_f", "4")
+          .add(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5")
+          .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-    for(int i=0; i<5; i++) {
-      daemonStream.read();
-    }
+      for (int i = 0; i < 5; i++) {
+        daemonStream.read();
+      }
 
-    new UpdateRequest()
-        .add(id, "5", "a_s", "hello0", "a_i", "4", "a_f", "4")
-        .add(id, "6", "a_s", "hello0", "a_i", "4", "a_f", "4")
-        .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
+      new UpdateRequest()
+          .add(id, "5", "a_s", "hello0", "a_i", "4", "a_f", "4")
+          .add(id, "6", "a_s", "hello0", "a_i", "4", "a_f", "4")
+          .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-    for(int i=0; i<2; i++) {
-      daemonStream.read();
-    }
+      for (int i = 0; i < 2; i++) {
+        daemonStream.read();
+      }
+
+      daemonStream.shutdown();
 
-    daemonStream.shutdown();
+      Tuple tuple = daemonStream.read();
 
-    Tuple tuple = daemonStream.read();
+      assertTrue(tuple.EOF);
+      daemonStream.close();
+    } finally {
+      cache.close();
+    }
 
-    assertTrue(tuple.EOF);
-    daemonStream.close();
-    cache.close();
 
   }
 
@@ -1662,99 +1799,107 @@ public void testTrace() throws Exception {
         .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
         .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-    SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc", "partitionKeys", "a_s");
-    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
-
-    Bucket[] buckets =  {new Bucket("a_s")};
-
-    Metric[] metrics = {new SumMetric("a_i"),
-                        new SumMetric("a_f"),
-                        new MinMetric("a_i"),
-                        new MinMetric("a_f"),
-                        new MaxMetric("a_i"),
-                        new MaxMetric("a_f"),
-                        new MeanMetric("a_i"),
-                        new MeanMetric("a_f"),
-                        new CountMetric()};
-
-    RollupStream rollupStream = new RollupStream(stream, buckets, metrics);
-    ParallelStream parallelStream = parallelStream(rollupStream, new FieldComparator("a_s", ComparatorOrder.ASCENDING));
-    attachStreamFactory(parallelStream);
-    List<Tuple> tuples = getTuples(parallelStream);
-
-    assertEquals(3, tuples.size());
-
-    //Test Long and Double Sums
-
-    Tuple tuple = tuples.get(0);
-    String bucket = tuple.getString("a_s");
-    Double sumi = tuple.getDouble("sum(a_i)");
-    Double sumf = tuple.getDouble("sum(a_f)");
-    Double mini = tuple.getDouble("min(a_i)");
-    Double minf = tuple.getDouble("min(a_f)");
-    Double maxi = tuple.getDouble("max(a_i)");
-    Double maxf = tuple.getDouble("max(a_f)");
-    Double avgi = tuple.getDouble("avg(a_i)");
-    Double avgf = tuple.getDouble("avg(a_f)");
-    Double count = tuple.getDouble("count(*)");
-
-    assertEquals("hello0", bucket);
-    assertEquals(17, sumi.doubleValue(), 0.001);
-    assertEquals(18, sumf.doubleValue(), 0.001);
-    assertEquals(0, mini.doubleValue(), 0.001);
-    assertEquals(1, minf.doubleValue(), 0.001);
-    assertEquals(14, maxi.doubleValue(), 0.001);
-    assertEquals(10, maxf.doubleValue(), 0.001);
-    assertEquals(4.25, avgi.doubleValue(), 0.001);
-    assertEquals(4.5, avgf.doubleValue(), 0.001);
-    assertEquals(4, count.doubleValue(), 0.001);
-
-    tuple = tuples.get(1);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello3", bucket);
-    assertEquals(38, sumi.doubleValue(), 0.001);
-    assertEquals(26, sumf.doubleValue(), 0.001);
-    assertEquals(3, mini.doubleValue(), 0.001);
-    assertEquals(3, minf.doubleValue(), 0.001);
-    assertEquals(13, maxi.doubleValue(), 0.001);
-    assertEquals(9, maxf.doubleValue(), 0.001);
-    assertEquals(9.5, avgi.doubleValue(), 0.001);
-    assertEquals(6.5, avgf.doubleValue(), 0.001);
-    assertEquals(4, count.doubleValue(), 0.001);
-
-    tuple = tuples.get(2);
-    bucket = tuple.getString("a_s");
-    sumi = tuple.getDouble("sum(a_i)");
-    sumf = tuple.getDouble("sum(a_f)");
-    mini = tuple.getDouble("min(a_i)");
-    minf = tuple.getDouble("min(a_f)");
-    maxi = tuple.getDouble("max(a_i)");
-    maxf = tuple.getDouble("max(a_f)");
-    avgi = tuple.getDouble("avg(a_i)");
-    avgf = tuple.getDouble("avg(a_f)");
-    count = tuple.getDouble("count(*)");
-
-    assertEquals("hello4", bucket);
-    assertEquals(15, sumi.longValue());
-    assertEquals(11, sumf.doubleValue(), 0.001);
-    assertEq

<TRUNCATED>
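
A note on the recurring pattern in the streaming-test hunks above: each test body is now wrapped in try/finally so the shared SolrClientCache is always released, and setStreamContext is called on every stream the test opens (including the inner SolrStream that polls for the topic checkpoint). A minimal sketch of that pattern, reusing the test class's own helpers (mapParams, getTuples, zkHost, COLLECTIONORALIAS):

    StreamContext context = new StreamContext();
    SolrClientCache cache = new SolrClientCache();
    context.setSolrClientCache(cache);
    try {
      SolrParams sParams = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc");
      CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParams);
      stream.setStreamContext(context); // every stream in the test must share the cache
      List<Tuple> tuples = getTuples(stream);
      // ... assertions over tuples ...
    } finally {
      cache.close(); // release cached SolrClients even if an assertion fails
    }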

[18/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index d74650f..8b8269b 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -82,6 +82,7 @@ import org.apache.solr.metrics.SolrCoreMetricManager;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
 import org.apache.solr.request.SolrRequestHandler;
+import org.apache.solr.search.SolrFieldCacheBean;
 import org.apache.solr.security.AuthenticationPlugin;
 import org.apache.solr.security.AuthorizationPlugin;
 import org.apache.solr.security.HttpClientBuilderPlugin;
@@ -122,7 +123,7 @@ public class CoreContainer {
     public final Exception exception;
 
     public CoreLoadFailure(CoreDescriptor cd, Exception loadFailure) {
-      this.cd = cd;
+      this.cd = new CoreDescriptor(cd.getName(), cd);
       this.exception = loadFailure;
     }
   }
@@ -216,8 +217,6 @@ public class CoreContainer {
     return this.containerHandlers;
   }
 
- // private ClientConnectionManager clientConnectionManager = new PoolingClientConnectionManager();
-
   {
     log.debug("New CoreContainer " + System.identityHashCode(this));
   }
@@ -485,18 +484,18 @@ public class CoreContainer {
     metricManager = new SolrMetricManager();
 
     coreContainerWorkExecutor = MetricUtils.instrumentedExecutorService(
-        coreContainerWorkExecutor,
-        metricManager.registry(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node)),
-        SolrMetricManager.mkName("coreContainerWorkExecutor", SolrInfoMBean.Category.CONTAINER.toString(), "threadPool"));
+        coreContainerWorkExecutor, null,
+        metricManager.registry(SolrMetricManager.getRegistryName(SolrInfoBean.Group.node)),
+        SolrMetricManager.mkName("coreContainerWorkExecutor", SolrInfoBean.Category.CONTAINER.toString(), "threadPool"));
 
     shardHandlerFactory = ShardHandlerFactory.newInstance(cfg.getShardHandlerFactoryPluginInfo(), loader);
     if (shardHandlerFactory instanceof SolrMetricProducer) {
       SolrMetricProducer metricProducer = (SolrMetricProducer) shardHandlerFactory;
-      metricProducer.initializeMetrics(metricManager, SolrInfoMBean.Group.node.toString(), "httpShardHandler");
+      metricProducer.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), "httpShardHandler");
     }
 
     updateShardHandler = new UpdateShardHandler(cfg.getUpdateShardHandlerConfig());
-    updateShardHandler.initializeMetrics(metricManager, SolrInfoMBean.Group.node.toString(), "updateShardHandler");
+    updateShardHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), "updateShardHandler");
 
     transientCoreCache = TransientSolrCoreCacheFactory.newInstance(loader, this);
 
@@ -526,14 +525,14 @@ public class CoreContainer {
     autoScalingHandler = createHandler(AutoScalingHandler.HANDLER_PATH, AutoScalingHandler.class.getName(), AutoScalingHandler.class);
 
     containerHandlers.put(AUTHZ_PATH, securityConfHandler);
-    securityConfHandler.initializeMetrics(metricManager, SolrInfoMBean.Group.node.toString(), AUTHZ_PATH);
+    securityConfHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), AUTHZ_PATH);
     containerHandlers.put(AUTHC_PATH, securityConfHandler);
     if(pkiAuthenticationPlugin != null)
       containerHandlers.put(PKIAuthenticationPlugin.PATH, pkiAuthenticationPlugin.getRequestHandler());
 
-    metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.node);
-    metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.jvm);
-    metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.jetty);
+    metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoBean.Group.node);
+    metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoBean.Group.jvm);
+    metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoBean.Group.jetty);
 
     coreConfigService = ConfigSetService.createConfigSetService(cfg, loader, zkSys.zkController);
 
@@ -541,17 +540,25 @@ public class CoreContainer {
 
     // initialize gauges for reporting the number of cores and disk total/free
 
-    String registryName = SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node);
-    metricManager.registerGauge(registryName, () -> solrCores.getCores().size(),
-        true, "loaded", SolrInfoMBean.Category.CONTAINER.toString(), "cores");
-    metricManager.registerGauge(registryName, () -> solrCores.getLoadedCoreNames().size() - solrCores.getCores().size(),
-        true, "lazy",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
-    metricManager.registerGauge(registryName, () -> solrCores.getAllCoreNames().size() - solrCores.getLoadedCoreNames().size(),
-        true, "unloaded",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
-    metricManager.registerGauge(registryName, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(),
-        true, "totalSpace", SolrInfoMBean.Category.CONTAINER.toString(), "fs");
-    metricManager.registerGauge(registryName, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(),
-        true, "usableSpace", SolrInfoMBean.Category.CONTAINER.toString(), "fs");
+    String registryName = SolrMetricManager.getRegistryName(SolrInfoBean.Group.node);
+    metricManager.registerGauge(null, registryName, () -> solrCores.getCores().size(),
+        true, "loaded", SolrInfoBean.Category.CONTAINER.toString(), "cores");
+    metricManager.registerGauge(null, registryName, () -> solrCores.getLoadedCoreNames().size() - solrCores.getCores().size(),
+        true, "lazy", SolrInfoBean.Category.CONTAINER.toString(), "cores");
+    metricManager.registerGauge(null, registryName, () -> solrCores.getAllCoreNames().size() - solrCores.getLoadedCoreNames().size(),
+        true, "unloaded", SolrInfoBean.Category.CONTAINER.toString(), "cores");
+    metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(),
+        true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs");
+    metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(),
+        true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs");
+    // add version information
+    metricManager.registerGauge(null, registryName, () -> this.getClass().getPackage().getSpecificationVersion(),
+        true, "specification", SolrInfoBean.Category.CONTAINER.toString(), "version");
+    metricManager.registerGauge(null, registryName, () -> this.getClass().getPackage().getImplementationVersion(),
+        true, "implementation", SolrInfoBean.Category.CONTAINER.toString(), "version");
+
+    SolrFieldCacheBean fieldCacheBean = new SolrFieldCacheBean();
+    fieldCacheBean.initializeMetrics(metricManager, registryName, null);
 
     if (isZooKeeperAware()) {
       metricManager.loadClusterReporters(cfg.getMetricReporterPlugins(), this);
@@ -561,9 +568,9 @@ public class CoreContainer {
     ExecutorService coreLoadExecutor = MetricUtils.instrumentedExecutorService(
         ExecutorUtil.newMDCAwareFixedThreadPool(
             cfg.getCoreLoadThreadCount(isZooKeeperAware()),
-            new DefaultSolrThreadFactory("coreLoadExecutor")),
-        metricManager.registry(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node)),
-        SolrMetricManager.mkName("coreLoadExecutor",SolrInfoMBean.Category.CONTAINER.toString(), "threadPool"));
+            new DefaultSolrThreadFactory("coreLoadExecutor")), null,
+        metricManager.registry(SolrMetricManager.getRegistryName(SolrInfoBean.Group.node)),
+        SolrMetricManager.mkName("coreLoadExecutor", SolrInfoBean.Category.CONTAINER.toString(), "threadPool"));
     final List<Future<SolrCore>> futures = new ArrayList<>();
     try {
       List<CoreDescriptor> cds = coresLocator.discover(this);
@@ -577,7 +584,7 @@ public class CoreContainer {
 
       for (final CoreDescriptor cd : cds) {
         if (cd.isTransient() || !cd.isLoadOnStartup()) {
-          solrCores.putDynamicDescriptor(cd.getName(), cd);
+          getTransientCacheHandler().addTransientDescriptor(cd.getName(), cd);
         } else if (asyncSolrCoreLoad) {
           solrCores.markCoreAsLoading(cd);
         }
@@ -691,14 +698,16 @@ public class CoreContainer {
 
     ExecutorUtil.shutdownAndAwaitTermination(coreContainerWorkExecutor);
     if (metricManager != null) {
-      metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node));
+      metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.node));
+      metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm));
+      metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.jetty));
     }
 
     if (isZooKeeperAware()) {
       cancelCoreRecoveries();
       zkSys.zkController.publishNodeAsDown(zkSys.zkController.getNodeName());
       if (metricManager != null) {
-        metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.cluster));
+        metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.cluster));
       }
     }
 
@@ -810,50 +819,35 @@ public class CoreContainer {
     return coresLocator;
   }
 
-  protected SolrCore registerCore(String name, SolrCore core, boolean registerInZk, boolean skipRecovery) {
+  protected SolrCore registerCore(CoreDescriptor cd, SolrCore core, boolean registerInZk, boolean skipRecovery) {
     if( core == null ) {
       throw new RuntimeException( "Can not register a null core." );
     }
-
-    // We can register a core when creating them via the admin UI, so we need to ensure that the dynamic descriptors
-    // are up to date
-    CoreDescriptor cd = core.getCoreDescriptor();
-    if ((cd.isTransient() || ! cd.isLoadOnStartup())
-        && solrCores.getDynamicDescriptor(name) == null) {
-      // Store it away for later use. includes non-transient but not
-      // loaded at startup cores.
-      solrCores.putDynamicDescriptor(name, cd);
-    }
-
-    SolrCore old;
-
+    
     if (isShutDown) {
       core.close();
       throw new IllegalStateException("This CoreContainer has been closed");
     }
-    if (cd.isTransient()) {
-      old = solrCores.putTransientCore(cfg, name, core, loader);
-    } else {
-      old = solrCores.putCore(name, core);
-    }
+    SolrCore old = solrCores.putCore(cd, core);
       /*
       * set both the name of the descriptor and the name of the
       * core, since the descriptors name is used for persisting.
       */
 
-    core.setName(name);
+    solrCores.addCoreDescriptor(new CoreDescriptor(cd.getName(), cd));
+    core.setName(cd.getName());
 
-    coreInitFailures.remove(name);
+    coreInitFailures.remove(cd.getName());
 
     if( old == null || old == core) {
-      log.debug( "registering core: "+name );
+      log.debug( "registering core: " + cd.getName() );
       if (registerInZk) {
         zkSys.registerInZk(core, false, skipRecovery);
       }
       return null;
     }
     else {
-      log.debug( "replacing core: "+name );
+      log.debug( "replacing core: " + cd.getName() );
       old.close();
       if (registerInZk) {
         zkSys.registerInZk(core, false, skipRecovery);
@@ -881,10 +875,10 @@ public class CoreContainer {
    */
   public SolrCore create(String coreName, Path instancePath, Map<String, String> parameters, boolean newCollection) {
 
-    CoreDescriptor cd = new CoreDescriptor(this, coreName, instancePath, parameters);
+    CoreDescriptor cd = new CoreDescriptor(coreName, instancePath, parameters, getContainerProperties(), isZooKeeperAware());
 
     // TODO: There's a race here, isn't there?
-    if (getAllCoreNames().contains(coreName)) {
+    if (getLoadedCoreNames().contains(coreName)) {
       log.warn("Creating a core with existing name is not allowed");
       // TODO: Shouldn't this be a BAD_REQUEST?
       throw new SolrException(ErrorCode.SERVER_ERROR, "Core with name '" + coreName + "' already exists.");
@@ -957,7 +951,7 @@ public class CoreContainer {
 
     SolrCore core = null;
     try {
-      MDCLoggingContext.setCoreDescriptor(dcore);
+      MDCLoggingContext.setCoreDescriptor(this, dcore);
       SolrIdentifierValidator.validateCoreName(dcore.getName());
       if (zkSys.getZkController() != null) {
         zkSys.getZkController().preRegister(dcore);
@@ -967,7 +961,7 @@ public class CoreContainer {
       dcore.setConfigSetTrusted(coreConfig.isTrusted());
       log.info("Creating SolrCore '{}' using configuration from {}, trusted={}", dcore.getName(), coreConfig.getName(), dcore.isConfigSetTrusted());
       try {
-        core = new SolrCore(dcore, coreConfig);
+        core = new SolrCore(this, dcore, coreConfig);
       } catch (SolrException e) {
         core = processCoreCreateException(e, dcore, coreConfig);
       }
@@ -977,7 +971,7 @@ public class CoreContainer {
         core.getUpdateHandler().getUpdateLog().recoverFromLog();
       }
 
-      registerCore(dcore.getName(), core, publishState, newCollection);
+      registerCore(dcore, core, publishState, newCollection);
 
       return core;
     } catch (Exception e) {
@@ -1041,7 +1035,7 @@ public class CoreContainer {
             if (leader != null && leader.getState() == State.ACTIVE) {
               log.info("Found active leader, will attempt to create fresh core and recover.");
               resetIndexDirectory(dcore, coreConfig);
-              return new SolrCore(dcore, coreConfig);
+              return new SolrCore(this, dcore, coreConfig);
             }
           } catch (SolrException se) {
             se.addSuppressed(original);
@@ -1064,7 +1058,7 @@ public class CoreContainer {
   private void resetIndexDirectory(CoreDescriptor dcore, ConfigSet coreConfig) {
     SolrConfig config = coreConfig.getSolrConfig();
 
-    String registryName = SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, dcore.getName());
+    String registryName = SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, dcore.getName());
     DirectoryFactory df = DirectoryFactory.loadDirectoryFactory(config, this, registryName);
     String dataDir = SolrCore.findDataDir(df, null, config, dcore);
 
@@ -1097,7 +1091,7 @@ public class CoreContainer {
   /**
    * @return a Collection of the names that loaded cores are mapped to
    */
-  public Collection<String> getCoreNames() {
+  public Collection<String> getLoadedCoreNames() {
     return solrCores.getLoadedCoreNames();
   }
 
@@ -1153,13 +1147,18 @@ public class CoreContainer {
   public void reload(String name) {
     SolrCore core = solrCores.getCoreFromAnyList(name, false);
     if (core != null) {
-      CoreDescriptor cd = core.getCoreDescriptor();
+      // The underlying core properties file may have changed and we don't really know. So we have a (perhaps) stale
+      // CoreDescriptor; reload it from disk if it's out there.
+      CorePropertiesLocator cpl = new CorePropertiesLocator(null);
+      CoreDescriptor cd = cpl.reload(this, core.getCoreDescriptor());
+      if (cd == null) cd = core.getCoreDescriptor();
+      solrCores.addCoreDescriptor(cd);
       try {
         solrCores.waitAddPendingCoreOps(cd.getName());
         ConfigSet coreConfig = coreConfigService.getConfig(cd);
         log.info("Reloading SolrCore '{}' using configuration from {}", cd.getName(), coreConfig.getName());
         SolrCore newCore = core.reload(coreConfig);
-        registerCore(cd.getName(), newCore, false, false);
+        registerCore(cd, newCore, false, false);
         if (getZkController() != null) {
           boolean onlyLeaderIndexes = getZkController().getClusterState().getCollection(cd.getCollectionName()).getRealtimeReplicas() == 1;
           if (onlyLeaderIndexes && !cd.getCloudDescriptor().isLeader()) {
@@ -1238,7 +1237,6 @@ public class CoreContainer {
     boolean close = solrCores.isLoadedNotPendingClose(name);
     SolrCore core = solrCores.remove(name);
     coresLocator.delete(this, cd);
-
     if (core == null) {
       // transient core
       SolrCore.deleteUnloadedCore(cd, deleteDataDir, deleteInstanceDir);
@@ -1253,7 +1251,7 @@ public class CoreContainer {
       core.getSolrCoreState().cancelRecovery();
     }
     
-    core.unloadOnClose(deleteIndexDir, deleteDataDir, deleteInstanceDir);
+    core.unloadOnClose(cd, deleteIndexDir, deleteDataDir, deleteInstanceDir);
     if (close)
       core.closeAndWait();
 
@@ -1267,6 +1265,9 @@ public class CoreContainer {
         throw new SolrException(ErrorCode.SERVER_ERROR, "Error unregistering core [" + name + "] from cloud state", e);
       }
     }
+    if (deleteInstanceDir) { // we aren't going to reload this if we delete the instance dir.
+      solrCores.removeCoreDescriptor(cd);
+    }
   }
 
   public void rename(String name, String toName) {
@@ -1276,8 +1277,15 @@ public class CoreContainer {
         String oldRegistryName = core.getCoreMetricManager().getRegistryName();
         String newRegistryName = SolrCoreMetricManager.createRegistryName(core, toName);
         metricManager.swapRegistries(oldRegistryName, newRegistryName);
-        registerCore(toName, core, true, false);
+        // The old coreDescriptor is obsolete, so remove it. registerCore will put it back.
+        CoreDescriptor cd = core.getCoreDescriptor();
+        solrCores.removeCoreDescriptor(cd);
+        cd.setProperty("name", toName);
+        solrCores.addCoreDescriptor(cd);
+        core.setName(toName);
+        registerCore(cd, core, true, false);
         SolrCore old = solrCores.remove(name);
+
         coresLocator.rename(this, old.getCoreDescriptor(), core.getCoreDescriptor());
       }
     }
@@ -1292,12 +1300,7 @@ public class CoreContainer {
   }
 
   public CoreDescriptor getCoreDescriptor(String coreName) {
-    // TODO make this less hideous!
-    for (CoreDescriptor cd : getCoreDescriptors()) {
-      if (cd.getName().equals(coreName))
-        return cd;
-    }
-    return null;
+    return solrCores.getCoreDescriptor(coreName);
   }
 
   public Path getCoreRootDirectory() {
@@ -1317,29 +1320,32 @@ public class CoreContainer {
     // Do this in two phases since we don't want to lock access to the cores over a load.
     SolrCore core = solrCores.getCoreFromAnyList(name, true);
 
+    // If a core is loaded, we're done; just return it.
     if (core != null) {
       return core;
     }
 
-    // OK, it's not presently in any list, is it in the list of dynamic cores but not loaded yet? If so, load it.
-    CoreDescriptor desc = solrCores.getDynamicDescriptor(name);
-    if (desc == null) { //Nope, no transient core with this name
+    // If it's not yet loaded, we can check if it's had a core init failure and "do the right thing"
+    CoreDescriptor desc = solrCores.getCoreDescriptor(name);
 
-      // if there was an error initializing this core, throw a 500
-      // error with the details for clients attempting to access it.
-      CoreLoadFailure loadFailure = getCoreInitFailures().get(name);
-      if (null != loadFailure) {
-        throw new SolrCoreInitializationException(name, loadFailure.exception);
-      }
-      // otherwise the user is simply asking for something that doesn't exist.
-      return null;
+    // if there was an error initializing this core, throw a 500
+    // error with the details for clients attempting to access it.
+    CoreLoadFailure loadFailure = getCoreInitFailures().get(name);
+    if (null != loadFailure) {
+      throw new SolrCoreInitializationException(name, loadFailure.exception);
     }
-
+    // This is a bit of awkwardness where SolrCloud and transient cores don't play nice together. For transient cores,
+    // we have to allow them to be created at any time there hasn't been a core load failure (use reload to cure that).
+    // But for TestConfigSetsAPI.testUploadWithScriptUpdateProcessor, this needs to _not_ try to load the core if
+    // the core is null and there was an error. If you change this, be sure to run both TestConfigSetsAPI and 
+    // TestLazyCores
+    if (desc == null || zkSys.getZkController() != null) return null;
+    
     // This will put an entry in pending core ops if the core isn't loaded
     core = solrCores.waitAddPendingCoreOps(name);
 
     if (isShutDown) return null; // We're quitting, so stop. This needs to be after the wait above since we may come off
-                                 // the wait as a consequence of shutting down.
+    // the wait as a consequence of shutting down.
     try {
       if (core == null) {
         if (zkSys.getZkController() != null) {
@@ -1382,7 +1388,7 @@ public class CoreContainer {
       containerHandlers.put(path, (SolrRequestHandler)handler);
     }
     if (handler instanceof SolrMetricProducer) {
-      ((SolrMetricProducer)handler).initializeMetrics(metricManager, SolrInfoMBean.Group.node.toString(), path);
+      ((SolrMetricProducer)handler).initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), path);
     }
     return handler;
   }
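
Two mechanical changes run through the CoreContainer diff above: SolrInfoMBean is renamed to SolrInfoBean, and the metric helpers (registerGauge, MetricUtils.instrumentedExecutorService) gain a leading argument identifying the owning metric producer, passed as null here because the container itself owns these gauges. A before/after sketch of one gauge registration, taken from the hunks above:

    // before: no owner argument, old SolrInfoMBean naming
    metricManager.registerGauge(registryName, () -> solrCores.getCores().size(),
        true, "loaded", SolrInfoMBean.Category.CONTAINER.toString(), "cores");

    // after: a leading owner argument (null for container-level gauges), SolrInfoBean naming
    metricManager.registerGauge(null, registryName, () -> solrCores.getCores().size(),
        true, "loaded", SolrInfoBean.Category.CONTAINER.toString(), "cores");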

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java b/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java
index 0dc2c71..1747fa2 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java
@@ -121,8 +121,6 @@ public class CoreDescriptor {
       CloudDescriptor.NUM_SHARDS
   );
 
-  private final CoreContainer coreContainer;
-
   private final CloudDescriptor cloudDesc;
 
   private final Path instanceDir;
@@ -139,8 +137,9 @@ public class CoreDescriptor {
   /** The properties for this core, substitutable by resource loaders */
   protected final Properties substitutableProperties = new Properties();
 
-  public CoreDescriptor(CoreContainer container, String name, Path instanceDir, String... properties) {
-    this(container, name, instanceDir, toMap(properties));
+  public CoreDescriptor(String name, Path instanceDir, Properties containerProperties,
+                        boolean isZooKeeperAware, String... properties) {
+    this(name, instanceDir, toMap(properties), containerProperties, isZooKeeperAware);
   }
 
   private static Map<String, String> toMap(String... properties) {
@@ -154,12 +153,14 @@ public class CoreDescriptor {
 
   /**
    * Create a new CoreDescriptor with a given name and instancedir
-   * @param container     the CoreDescriptor's container
    * @param name          the CoreDescriptor's name
    * @param instanceDir   the CoreDescriptor's instancedir
+   * @param containerProperties the enclosing container properties for variable resolution
+   * @param isZooKeeperAware whether we are part of SolrCloud or not. 
    */
-  public CoreDescriptor(CoreContainer container, String name, Path instanceDir) {
-    this(container, name, instanceDir, Collections.emptyMap());
+  public CoreDescriptor(String name, Path instanceDir,
+                        Properties containerProperties, boolean isZooKeeperAware) {
+    this(name, instanceDir, Collections.emptyMap(), containerProperties, isZooKeeperAware);
   }
 
   /**
@@ -168,7 +169,6 @@ public class CoreDescriptor {
    * @param other    the CoreDescriptor to copy
    */
   public CoreDescriptor(String coreName, CoreDescriptor other) {
-    this.coreContainer = other.coreContainer;
     this.cloudDesc = other.cloudDesc;
     this.instanceDir = other.instanceDir;
     this.originalExtraProperties.putAll(other.originalExtraProperties);
@@ -183,20 +183,20 @@ public class CoreDescriptor {
 
   /**
    * Create a new CoreDescriptor.
-   * @param container       the CoreDescriptor's container
    * @param name            the CoreDescriptor's name
    * @param instanceDir     a Path resolving to the instanceDir
    * @param coreProps       a Map of the properties for this core
+   * @param containerProperties the properties from the enclosing container.
+   * @param isZooKeeperAware if true, we are in SolrCloud mode.
    */
-  public CoreDescriptor(CoreContainer container, String name, Path instanceDir,
-                        Map<String, String> coreProps) {
 
-    this.coreContainer = container;
+
+  public CoreDescriptor(String name, Path instanceDir, Map<String, String> coreProps,
+                        Properties containerProperties, boolean isZooKeeperAware) {
     this.instanceDir = instanceDir;
 
     originalCoreProperties.setProperty(CORE_NAME, name);
 
-    Properties containerProperties = container.getContainerProperties();
     name = PropertiesUtil.substituteProperty(checkPropertyIsNotEmpty(name, CORE_NAME),
                                              containerProperties);
 
@@ -221,7 +221,7 @@ public class CoreDescriptor {
     buildSubstitutableProperties();
 
     // TODO maybe make this a CloudCoreDescriptor subclass?
-    if (container.isZooKeeperAware()) {
+    if (isZooKeeperAware) {
       cloudDesc = new CloudDescriptor(name, coreProperties, this);
     }
     else {
@@ -324,15 +324,19 @@ public class CoreDescriptor {
   public String getName() {
     return coreProperties.getProperty(CORE_NAME);
   }
+  
+  public void setProperty(String prop, String val) {
+    if (substitutableProperties.containsKey(prop)) {
+      substitutableProperties.setProperty(prop, val);
+      return;
+    }
+    coreProperties.setProperty(prop, val);
+  }
 
   public String getCollectionName() {
     return cloudDesc == null ? null : cloudDesc.getCollectionName();
   }
 
-  public CoreContainer getCoreContainer() {
-    return coreContainer;
-  }
-
   public CloudDescriptor getCloudDescriptor() {
     return cloudDesc;
   }
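
CoreDescriptor no longer keeps a back-reference to its CoreContainer; callers instead pass in the two pieces of container state the descriptor actually uses. A sketch of the migration at a typical call site (mirroring the CorePropertiesLocator change below, where cc is a CoreContainer):

    // before: the descriptor captured the whole container
    CoreDescriptor cd = new CoreDescriptor(cc, name, instanceDir, propMap);

    // after: only the container properties and the cloud-mode flag are passed in
    CoreDescriptor cd = new CoreDescriptor(name, instanceDir, propMap,
        cc.getContainerProperties(), cc.isZooKeeperAware());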

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
index b37402b..385d11b 100644
--- a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
+++ b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
@@ -163,6 +163,14 @@ public class CorePropertiesLocator implements CoresLocator {
     return cds;
   }
 
+  @Override
+  public CoreDescriptor reload(CoreContainer cc, CoreDescriptor cd) {
+    if (cd == null) return null;
+    
+    Path coreProps = cd.getInstanceDir().resolve(CoreDescriptor.DEFAULT_EXTERNAL_PROPERTIES_FILE);
+    return buildCoreDescriptor(coreProps, cc);
+  }
+
   protected CoreDescriptor buildCoreDescriptor(Path propertiesFile, CoreContainer cc) {
 
     Path instanceDir = propertiesFile.getParent();
@@ -174,7 +182,7 @@ public class CorePropertiesLocator implements CoresLocator {
       for (String key : coreProperties.stringPropertyNames()) {
         propMap.put(key, coreProperties.getProperty(key));
       }
-      return new CoreDescriptor(cc, name, instanceDir, propMap);
+      return new CoreDescriptor(name, instanceDir, propMap, cc.getContainerProperties(), cc.isZooKeeperAware());
     }
     catch (IOException e) {
       logger.error("Couldn't load core descriptor from {}:{}", propertiesFile, e.toString());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/CoresLocator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoresLocator.java b/solr/core/src/java/org/apache/solr/core/CoresLocator.java
index d4f40cd..beaa690 100644
--- a/solr/core/src/java/org/apache/solr/core/CoresLocator.java
+++ b/solr/core/src/java/org/apache/solr/core/CoresLocator.java
@@ -69,4 +69,13 @@ public interface CoresLocator {
    */
   public List<CoreDescriptor> discover(CoreContainer cc);
 
+  /**
+   * Reload an existing CoreDescriptor, that is, re-read it from disk.
+   *
+   * @param cc the CoreContainer
+   * @param cd the old CoreDescriptor; if null, this is a no-op
+   * @return the reloaded CoreDescriptor, or null if it could not be rebuilt
+   */
+  public CoreDescriptor reload(CoreContainer cc, CoreDescriptor cd);
+
 }
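
The new reload hook lets CoreContainer pick up edits made to a core.properties file on disk. The intended call pattern, as added to CoreContainer.reload above (coreContainer stands in for the enclosing container, which the diff passes as this), degrades gracefully when nothing fresher can be read:

    CorePropertiesLocator cpl = new CorePropertiesLocator(null);
    CoreDescriptor cd = cpl.reload(coreContainer, core.getCoreDescriptor());
    if (cd == null) {
      cd = core.getCoreDescriptor(); // fall back to the in-memory descriptor
    }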

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java
index cc24e6c..20824ab 100644
--- a/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java
@@ -24,7 +24,6 @@ import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.nio.file.NoSuchFileException;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 
@@ -321,13 +320,6 @@ public abstract class DirectoryFactory implements NamedListInitializedPlugin,
     return cd.getInstanceDir().resolve(cd.getDataDir()).toAbsolutePath().toString();
   }
 
-  /**
-   * Optionally allow the DirectoryFactory to request registration of some MBeans.
-   */
-  public Collection<SolrInfoMBean> offerMBeans() {
-    return Collections.emptySet();
-  }
-
   public void cleanupOldIndexDirectories(final String dataDirPath, final String currentIndexDirPath, boolean afterCoreReload) {
     File dataDir = new File(dataDirPath);
     if (!dataDir.isDirectory()) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
index db953d3..260a991 100644
--- a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
@@ -22,7 +22,6 @@ import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.net.URLEncoder;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
@@ -51,6 +50,8 @@ import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
 import org.apache.solr.store.blockcache.BlockCache;
 import org.apache.solr.store.blockcache.BlockDirectory;
 import org.apache.solr.store.blockcache.BlockDirectoryCache;
@@ -70,7 +71,7 @@ import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.RemovalListener;
 import com.google.common.cache.RemovalNotification;
 
-public class HdfsDirectoryFactory extends CachingDirectoryFactory implements SolrCoreAware {
+public class HdfsDirectoryFactory extends CachingDirectoryFactory implements SolrCoreAware, SolrMetricProducer {
   private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   public static final String BLOCKCACHE_SLAB_COUNT = "solr.hdfs.blockcache.slab.count";
@@ -493,13 +494,14 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
   }
 
   @Override
-  public Collection<SolrInfoMBean> offerMBeans() {
-    return Arrays.<SolrInfoMBean>asList(MetricsHolder.metrics, LocalityHolder.reporter);
+  public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
+    MetricsHolder.metrics.initializeMetrics(manager, registry, scope);
+    LocalityHolder.reporter.initializeMetrics(manager, registry, scope);
   }
 
   @Override
   public void inform(SolrCore core) {
-    setHost(core.getCoreDescriptor().getCoreContainer().getHostName());
+    setHost(core.getCoreContainer().getHostName());
   }
 
   @VisibleForTesting
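
With DirectoryFactory.offerMBeans() removed, a directory factory that wants to publish metrics now implements SolrMetricProducer directly, as HdfsDirectoryFactory does above by delegating to its metric holders. A hypothetical custom factory would follow the same shape (the class name and gauge below are illustrative, not part of this commit):

    public class MyDirectoryFactory extends CachingDirectoryFactory implements SolrMetricProducer {
      @Override
      public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
        // expose a factory-level gauge under the given metric registry
        manager.registerGauge(null, registry, () -> openDirectoryCount(),
            true, "openDirectories", scope);
      }

      private int openDirectoryCount() {
        return 0; // illustrative placeholder
      }
    }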

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java b/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
deleted file mode 100644
index 4fb0dcd..0000000
--- a/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
+++ /dev/null
@@ -1,478 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core;
-
-import javax.management.Attribute;
-import javax.management.AttributeList;
-import javax.management.AttributeNotFoundException;
-import javax.management.DynamicMBean;
-import javax.management.InstanceNotFoundException;
-import javax.management.InvalidAttributeValueException;
-import javax.management.MBeanAttributeInfo;
-import javax.management.MBeanException;
-import javax.management.MBeanInfo;
-import javax.management.MBeanServer;
-import javax.management.MBeanServerFactory;
-import javax.management.MalformedObjectNameException;
-import javax.management.ObjectName;
-import javax.management.Query;
-import javax.management.QueryExp;
-import javax.management.ReflectionException;
-import javax.management.openmbean.OpenMBeanAttributeInfoSupport;
-import javax.management.openmbean.OpenType;
-import javax.management.openmbean.SimpleType;
-import javax.management.remote.JMXConnectorServer;
-import javax.management.remote.JMXConnectorServerFactory;
-import javax.management.remote.JMXServiceURL;
-import java.lang.invoke.MethodHandles;
-import java.lang.reflect.Field;
-import java.lang.reflect.Method;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.Hashtable;
-import java.util.List;
-import java.util.Locale;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.lucene.store.AlreadyClosedException;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.SolrConfig.JmxConfiguration;
-import org.apache.solr.metrics.reporters.JmxObjectNameFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static org.apache.solr.common.params.CommonParams.ID;
-import static org.apache.solr.common.params.CommonParams.NAME;
-
-/**
- * <p>
- * Responsible for finding (or creating) a MBeanServer from given configuration
- * and registering all SolrInfoMBean objects with JMX.
- * </p>
- * <p>
- * Please see http://wiki.apache.org/solr/SolrJmx for instructions on usage and configuration
- * </p>
- *
- *
- * @see org.apache.solr.core.SolrConfig.JmxConfiguration
- * @since solr 1.3
- */
-public class JmxMonitoredMap<K, V> extends
-        ConcurrentHashMap<String, SolrInfoMBean> {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-  private static final String REPORTER_NAME = "_jmx_";
-
-  // set to true to use cached statistics NamedLists between getMBeanInfo calls to work
-  // around over calling getStatistics on MBeanInfos when iterating over all attributes (SOLR-6586)
-  private final boolean useCachedStatsBetweenGetMBeanInfoCalls = Boolean.getBoolean("useCachedStatsBetweenGetMBeanInfoCalls");
-  
-  private final MBeanServer server;
-
-  private final String jmxRootName;
-
-  private final String coreHashCode;
-
-  private final JmxObjectNameFactory nameFactory;
-
-  private final String registryName;
-
-  public JmxMonitoredMap(String coreName, String coreHashCode, String registryName,
-                         final JmxConfiguration jmxConfig) {
-    this.coreHashCode = coreHashCode;
-    this.registryName = registryName;
-    jmxRootName = (null != jmxConfig.rootName ?
-                   jmxConfig.rootName
-                   : ("solr" + (null != coreName ? "/" + coreName : "")));
-      
-    if (jmxConfig.serviceUrl == null) {
-      List<MBeanServer> servers = null;
-
-      if (jmxConfig.agentId == null) {
-        // Try to find the first MBeanServer
-        servers = MBeanServerFactory.findMBeanServer(null);
-      } else if (jmxConfig.agentId != null) {
-        // Try to find the first MBean server with the given agentId
-        servers = MBeanServerFactory.findMBeanServer(jmxConfig.agentId);
-        // throw Exception if no servers were found with the given agentId
-        if (servers == null || servers.isEmpty())
-          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                  "No JMX Servers found with agentId: " + jmxConfig.agentId);
-      }
-
-      if (servers == null || servers.isEmpty()) {
-        server = null;
-        nameFactory = null;
-        log.debug("No JMX servers found, not exposing Solr information with JMX.");
-        return;
-      }
-      server = servers.get(0);
-      log.info("JMX monitoring is enabled. Adding Solr mbeans to JMX Server: "
-               + server);
-    } else {
-      MBeanServer newServer = null;
-      try {
-        // Create a new MBeanServer with the given serviceUrl
-        newServer = MBeanServerFactory.newMBeanServer();
-        JMXConnectorServer connector = JMXConnectorServerFactory
-                .newJMXConnectorServer(new JMXServiceURL(jmxConfig.serviceUrl),
-                        null, newServer);
-        connector.start();
-        log.info("JMX monitoring is enabled at " + jmxConfig.serviceUrl);
-      } catch (Exception e) {
-        // Release the reference
-        throw new RuntimeException("Could not start JMX monitoring ", e);
-      }
-      server = newServer;
-    }
-    nameFactory = new JmxObjectNameFactory(REPORTER_NAME + coreHashCode, registryName);
-  }
-
-  /**
-   * Clears the map and unregisters all SolrInfoMBeans in the map from
-   * MBeanServer
-   */
-  @Override
-  public void clear() {
-    if (server != null) {
-      QueryExp exp = Query.or(Query.eq(Query.attr("coreHashCode"), Query.value(coreHashCode)),
-                            Query.eq(Query.attr("reporter"), Query.value(REPORTER_NAME + coreHashCode)));
-      
-      Set<ObjectName> objectNames = null;
-      try {
-        objectNames = server.queryNames(null, exp);
-      } catch (Exception e) {
-        log.warn("Exception querying for mbeans", e);
-      }
-      
-      if (objectNames != null)  {
-        for (ObjectName name : objectNames) {
-          try {
-            server.unregisterMBean(name);
-          } catch (InstanceNotFoundException ie) {
-            // ignore - someone else already deleted this one
-          } catch (Exception e) {
-            log.warn("Exception un-registering mbean {}", name, e);
-          }
-        }
-      }
-    }
-
-    super.clear();
-  }
-
-  /**
-   * Adds the SolrInfoMBean to the map and registers the given SolrInfoMBean
-   * instance with the MBeanServer defined for this core. If a SolrInfoMBean is
-   * already registered with the MBeanServer then it is unregistered and then
-   * re-registered.
-   *
-   * @param key      the JMX type name for this SolrInfoMBean
-   * @param infoBean the SolrInfoMBean instance to be registered
-   */
-  @Override
-  public SolrInfoMBean put(String key, SolrInfoMBean infoBean) {
-    if (server != null && infoBean != null) {
-      try {
-        // back-compat name
-        ObjectName name = getObjectName(key, infoBean);
-        if (server.isRegistered(name))
-          server.unregisterMBean(name);
-        SolrDynamicMBean mbean = new SolrDynamicMBean(coreHashCode, infoBean, useCachedStatsBetweenGetMBeanInfoCalls);
-        server.registerMBean(mbean, name);
-        // now register it also under new name
-        String beanName = createBeanName(infoBean, key);
-        name = nameFactory.createName(null, registryName, beanName);
-        if (server.isRegistered(name))
-          server.unregisterMBean(name);
-        server.registerMBean(mbean, name);
-      } catch (Exception e) {
-        log.warn( "Failed to register info bean: key=" + key + ", infoBean=" + infoBean, e);
-      }
-    }
-
-    return super.put(key, infoBean);
-  }
-
-  private String createBeanName(SolrInfoMBean infoBean, String key) {
-    if (infoBean.getCategory() == null) {
-      throw new IllegalArgumentException("SolrInfoMBean.category must never be null: " + infoBean);
-    }
-    StringBuilder sb = new StringBuilder();
-    sb.append(infoBean.getCategory().toString());
-    sb.append('.');
-    sb.append(key);
-    sb.append('.');
-    sb.append(infoBean.getName());
-    return sb.toString();
-  }
-
-  /**
-   * Removes the SolrInfoMBean object at the given key and unregisters it from
-   * MBeanServer
-   *
-   * @param key the JMX type name for this SolrInfoMBean
-   */
-  @Override
-  public SolrInfoMBean remove(Object key) {
-    SolrInfoMBean infoBean = get(key);
-    if (infoBean != null) {
-      try {
-        unregister((String) key, infoBean);
-      } catch (RuntimeException e) {
-        log.warn( "Failed to unregister info bean: " + key, e);
-      }
-    }
-    return super.remove(key);
-  }
-
-  private void unregister(String key, SolrInfoMBean infoBean) {
-    if (server == null)
-      return;
-
-    try {
-      // remove legacy name
-      ObjectName name = getObjectName(key, infoBean);
-      if (server.isRegistered(name) && coreHashCode.equals(server.getAttribute(name, "coreHashCode"))) {
-        server.unregisterMBean(name);
-      }
-      // remove new name
-      String beanName = createBeanName(infoBean, key);
-      name = nameFactory.createName(null, registryName, beanName);
-      if (server.isRegistered(name)) {
-        server.unregisterMBean(name);
-      }
-    } catch (Exception e) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-              "Failed to unregister info bean: " + key, e);
-    }
-  }
-
-  private ObjectName getObjectName(String key, SolrInfoMBean infoBean)
-          throws MalformedObjectNameException {
-    Hashtable<String, String> map = new Hashtable<>();
-    map.put("type", key);
-    if (infoBean.getName() != null && !"".equals(infoBean.getName())) {
-      map.put(ID, infoBean.getName());
-    }
-    return ObjectName.getInstance(jmxRootName, map);
-  }
-
-  /** For test verification */
-  public MBeanServer getServer() {
-    return server;
-  }
-
-  /**
-   * DynamicMBean is used to dynamically expose all SolrInfoMBean
-   * getStatistics() NameList keys as String getters.
-   */
-  static class SolrDynamicMBean implements DynamicMBean {
-    private SolrInfoMBean infoBean;
-
-    private HashSet<String> staticStats;
-
-    private String coreHashCode;
-    
-    private volatile NamedList cachedDynamicStats;
-    
-    private boolean useCachedStatsBetweenGetMBeanInfoCalls;
-    
-    public SolrDynamicMBean(String coreHashCode, SolrInfoMBean managedResource) {
-      this(coreHashCode, managedResource, false);
-    }
-
-    public SolrDynamicMBean(String coreHashCode, SolrInfoMBean managedResource, boolean useCachedStatsBetweenGetMBeanInfoCalls) {
-      this.useCachedStatsBetweenGetMBeanInfoCalls = useCachedStatsBetweenGetMBeanInfoCalls;
-      if (managedResource instanceof JmxAugmentedSolrInfoMBean) {
-        final JmxAugmentedSolrInfoMBean jmxSpecific = (JmxAugmentedSolrInfoMBean)managedResource;
-        this.infoBean = new SolrInfoMBeanWrapper(jmxSpecific) {
-          @Override
-          public NamedList getStatistics() { return jmxSpecific.getStatisticsForJmx(); }
-        };
-      } else {
-        this.infoBean = managedResource;
-      }
-      staticStats = new HashSet<>();
-
-      // For which getters are already available in SolrInfoMBean
-      staticStats.add(NAME);
-      staticStats.add("version");
-      staticStats.add("description");
-      staticStats.add("category");
-      staticStats.add("source");
-      this.coreHashCode = coreHashCode;
-    }
-
-    @Override
-    public MBeanInfo getMBeanInfo() {
-      ArrayList<MBeanAttributeInfo> attrInfoList = new ArrayList<>();
-
-      for (String stat : staticStats) {
-        attrInfoList.add(new MBeanAttributeInfo(stat, String.class.getName(),
-                null, true, false, false));
-      }
-
-      // add core's hashcode
-      attrInfoList.add(new MBeanAttributeInfo("coreHashCode", String.class.getName(),
-                null, true, false, false));
-
-      try {
-        NamedList dynamicStats = infoBean.getStatistics();
-        
-        if (useCachedStatsBetweenGetMBeanInfoCalls) {
-          cachedDynamicStats = dynamicStats;
-        }
-        
-        if (dynamicStats != null) {
-          for (int i = 0; i < dynamicStats.size(); i++) {
-            String name = dynamicStats.getName(i);
-            if (staticStats.contains(name)) {
-              continue;
-            }
-            Class type = dynamicStats.get(name).getClass();
-            OpenType typeBox = determineType(type);
-            if (type.equals(String.class) || typeBox == null) {
-              attrInfoList.add(new MBeanAttributeInfo(dynamicStats.getName(i),
-                  String.class.getName(), null, true, false, false));
-            } else {
-              attrInfoList.add(new OpenMBeanAttributeInfoSupport(
-                  dynamicStats.getName(i), dynamicStats.getName(i), typeBox,
-                  true, false, false));
-            }
-          }
-        }
-      } catch (Exception e) {
-        // don't log issue if the core is closing
-        if (!(SolrException.getRootCause(e) instanceof AlreadyClosedException))
-          log.warn("Could not getStatistics on info bean {}", infoBean.getName(), e);
-      }
-
-      MBeanAttributeInfo[] attrInfoArr = attrInfoList
-              .toArray(new MBeanAttributeInfo[attrInfoList.size()]);
-      return new MBeanInfo(getClass().getName(), infoBean
-              .getDescription(), attrInfoArr, null, null, null);
-    }
-
-    private OpenType determineType(Class type) {
-      try {
-        for (Field field : SimpleType.class.getFields()) {
-          if (field.getType().equals(SimpleType.class)) {
-            SimpleType candidate = (SimpleType) field.get(SimpleType.class);
-            if (candidate.getTypeName().equals(type.getName())) {
-              return candidate;
-            }
-          }
-        }
-      } catch (Exception e) {
-        throw new RuntimeException(e);
-      }
-      return null;
-    }
-
-    @Override
-    public Object getAttribute(String attribute)
-            throws AttributeNotFoundException, MBeanException, ReflectionException {
-      Object val;
-      if ("coreHashCode".equals(attribute)) {
-        val = coreHashCode;
-      } else if (staticStats.contains(attribute) && attribute != null
-              && attribute.length() > 0) {
-        try {
-          String getter = "get" + attribute.substring(0, 1).toUpperCase(Locale.ROOT)
-                  + attribute.substring(1);
-          Method meth = infoBean.getClass().getMethod(getter);
-          val = meth.invoke(infoBean);
-        } catch (Exception e) {
-          throw new AttributeNotFoundException(attribute);
-        }
-      } else {
-        NamedList stats = null;
-        if (useCachedStatsBetweenGetMBeanInfoCalls) {
-          NamedList cachedStats = this.cachedDynamicStats;
-          if (cachedStats != null) {
-            stats = cachedStats;
-          }
-        }
-        if (stats == null) {
-          stats = infoBean.getStatistics();
-        }
-        val = stats.get(attribute);
-      }
-
-      if (val != null) {
-        // It's String or one of the simple types, just return it as JMX suggests direct support for such types
-        for (String simpleTypeName : SimpleType.ALLOWED_CLASSNAMES_LIST) {
-          if (val.getClass().getName().equals(simpleTypeName)) {
-            return val;
-          }
-        }
-        // It's an arbitrary object which could be something complex and odd, return its toString, assuming that is
-        // a workable representation of the object
-        return val.toString();
-      }
-      return null;
-    }
-
-    @Override
-    public AttributeList getAttributes(String[] attributes) {
-      AttributeList list = new AttributeList();
-      for (String attribute : attributes) {
-        try {
-          list.add(new Attribute(attribute, getAttribute(attribute)));
-        } catch (Exception e) {
-          log.warn("Could not get attribute " + attribute);
-        }
-      }
-
-      return list;
-    }
-
-    @Override
-    public void setAttribute(Attribute attribute)
-            throws AttributeNotFoundException, InvalidAttributeValueException,
-            MBeanException, ReflectionException {
-      throw new UnsupportedOperationException("Operation not Supported");
-    }
-
-    @Override
-    public AttributeList setAttributes(AttributeList attributes) {
-      throw new UnsupportedOperationException("Operation not Supported");
-    }
-
-    @Override
-    public Object invoke(String actionName, Object[] params, String[] signature)
-            throws MBeanException, ReflectionException {
-      throw new UnsupportedOperationException("Operation not Supported");
-    }
-  }
-
-  /**
-   * SolrInfoMBean that provides JMX-specific statistics.  Used, for example,
-   * if generating full statistics is expensive; the expensive statistics can
-   * be generated normally for use with the web ui, while an abbreviated version
-   * is generated for periodic jmx use.
-   */
-  public interface JmxAugmentedSolrInfoMBean extends SolrInfoMBean {
-    /**
-     * JMX-specific statistics
-     */
-    public NamedList getStatisticsForJmx();
-  }
-}
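
The dynamic MBean implementation removed above served each SolrInfoMBean
through the standard javax.management API, so its statistics were read like
any other MBean's attributes. A minimal sketch of such a read using only JDK
classes; the ObjectName is illustrative, since the real names depended on the
core name and the configured rootName:

    import java.lang.management.ManagementFactory;
    import javax.management.MBeanAttributeInfo;
    import javax.management.MBeanInfo;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    public class JmxStatsReader {
      public static void main(String[] args) throws Exception {
        MBeanServer server = ManagementFactory.getPlatformMBeanServer();
        // Illustrative name only; actual Solr MBean names varied per core.
        ObjectName name = new ObjectName("solr/collection1:type=searcher,id=searcher");
        // The attribute list was assembled by getMBeanInfo() above ...
        MBeanInfo info = server.getMBeanInfo(name);
        for (MBeanAttributeInfo attr : info.getAttributes()) {
          // ... and each individual read was routed to getAttribute() above.
          System.out.println(attr.getName() + " = " + server.getAttribute(name, attr.getName()));
        }
      }
    }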

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/NodeConfig.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/NodeConfig.java b/solr/core/src/java/org/apache/solr/core/NodeConfig.java
index de2dcea..7a209d0 100644
--- a/solr/core/src/java/org/apache/solr/core/NodeConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/NodeConfig.java
@@ -17,7 +17,10 @@
 package org.apache.solr.core;
 
 import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.HashSet;
 import java.util.Properties;
+import java.util.Set;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.logging.LogWatcherConfig;
@@ -64,6 +67,8 @@ public class NodeConfig {
 
   private final PluginInfo[] metricReporterPlugins;
 
+  private final Set<String> hiddenSysProps;
+
   private final PluginInfo transientCacheConfig;
 
   private NodeConfig(String nodeName, Path coreRootDirectory, Path configSetBaseDirectory, String sharedLibDirectory,
@@ -73,7 +78,7 @@ public class NodeConfig {
                      LogWatcherConfig logWatcherConfig, CloudConfig cloudConfig, Integer coreLoadThreads,
                      int transientCacheSize, boolean useSchemaCache, String managementPath, SolrResourceLoader loader,
                      Properties solrProperties, PluginInfo[] backupRepositoryPlugins,
-                     PluginInfo[] metricReporterPlugins, PluginInfo transientCacheConfig) {
+                     PluginInfo[] metricReporterPlugins, Set<String> hiddenSysProps, PluginInfo transientCacheConfig) {
     this.nodeName = nodeName;
     this.coreRootDirectory = coreRootDirectory;
     this.configSetBaseDirectory = configSetBaseDirectory;
@@ -94,6 +99,7 @@ public class NodeConfig {
     this.solrProperties = solrProperties;
     this.backupRepositoryPlugins = backupRepositoryPlugins;
     this.metricReporterPlugins = metricReporterPlugins;
+    this.hiddenSysProps = hiddenSysProps;
     this.transientCacheConfig = transientCacheConfig;
 
     if (this.cloudConfig != null && this.getCoreLoadThreadCount(false) < 2) {
@@ -187,6 +193,10 @@ public class NodeConfig {
     return metricReporterPlugins;
   }
 
+  public Set<String> getHiddenSysProps() {
+    return hiddenSysProps;
+  }
+
   public PluginInfo getTransientCachePluginInfo() { return transientCacheConfig; }
 
   public static class NodeConfigBuilder {
@@ -211,6 +221,7 @@ public class NodeConfig {
     private Properties solrProperties = new Properties();
     private PluginInfo[] backupRepositoryPlugins;
     private PluginInfo[] metricReporterPlugins;
+    private Set<String> hiddenSysProps = new HashSet<>(DEFAULT_HIDDEN_SYS_PROPS);
     private PluginInfo transientCacheConfig;
 
     private final SolrResourceLoader loader;
@@ -227,6 +238,14 @@ public class NodeConfig {
     private static final String DEFAULT_COLLECTIONSHANDLERCLASS = "org.apache.solr.handler.admin.CollectionsHandler";
     private static final String DEFAULT_CONFIGSETSHANDLERCLASS = "org.apache.solr.handler.admin.ConfigSetsHandler";
 
+    public static final Set<String> DEFAULT_HIDDEN_SYS_PROPS = new HashSet<>(Arrays.asList(
+        "javax.net.ssl.keyStorePassword",
+        "javax.net.ssl.trustStorePassword",
+        "basicauth",
+        "zkDigestPassword",
+        "zkDigestReadonlyPassword"
+    ));
+
     public NodeConfigBuilder(String nodeName, SolrResourceLoader loader) {
       this.nodeName = nodeName;
       this.loader = loader;
@@ -331,11 +350,16 @@ public class NodeConfig {
       return this;
     }
 
+    public NodeConfigBuilder setHiddenSysProps(Set<String> hiddenSysProps) {
+      this.hiddenSysProps = hiddenSysProps;
+      return this;
+    }
+
     public NodeConfig build() {
       return new NodeConfig(nodeName, coreRootDirectory, configSetBaseDirectory, sharedLibDirectory, shardHandlerFactoryConfig,
                             updateShardHandlerConfig, coreAdminHandlerClass, collectionsAdminHandlerClass, infoHandlerClass, configSetsHandlerClass,
                             logWatcherConfig, cloudConfig, coreLoadThreads, transientCacheSize, useSchemaCache, managementPath, loader, solrProperties,
-                            backupRepositoryPlugins, metricReporterPlugins, transientCacheConfig);
+                            backupRepositoryPlugins, metricReporterPlugins, hiddenSysProps, transientCacheConfig);
     }
   }
 }
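
The new hiddenSysProps set is a plain Set<String>, so callers can mask the
matching system properties before exposing them. A minimal sketch of that
use; the redact() helper and the "--REDACTED--" marker are assumptions for
illustration, not Solr's actual output:

    import java.util.HashSet;
    import java.util.Map;
    import java.util.Properties;
    import java.util.Set;

    import org.apache.solr.core.NodeConfig;

    public class SysPropRedactor {
      // Sketch of the intended use of NodeConfig#getHiddenSysProps().
      static Properties redact(Properties props, Set<String> hiddenSysProps) {
        Properties out = new Properties();
        for (Map.Entry<Object, Object> e : props.entrySet()) {
          String key = String.valueOf(e.getKey());
          out.setProperty(key,
              hiddenSysProps.contains(key) ? "--REDACTED--" : String.valueOf(e.getValue()));
        }
        return out;
      }

      public static void main(String[] args) {
        Set<String> hidden =
            new HashSet<>(NodeConfig.NodeConfigBuilder.DEFAULT_HIDDEN_SYS_PROPS);
        hidden.add("mycompany.secretToken"); // hypothetical extra entry
        System.out.println(redact(System.getProperties(), hidden));
      }
    }

A deployment that needs to hide additional properties would pass such a set
to NodeConfigBuilder.setHiddenSysProps(...) before calling build().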

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/PluginBag.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/PluginBag.java b/solr/core/src/java/org/apache/solr/core/PluginBag.java
index 65978f3..4c0858e 100644
--- a/solr/core/src/java/org/apache/solr/core/PluginBag.java
+++ b/solr/core/src/java/org/apache/solr/core/PluginBag.java
@@ -294,8 +294,8 @@ public class PluginBag<T> implements AutoCloseable {
 
   private void registerMBean(Object inst, SolrCore core, String pluginKey) {
     if (core == null) return;
-    if (inst instanceof SolrInfoMBean) {
-      SolrInfoMBean mBean = (SolrInfoMBean) inst;
+    if (inst instanceof SolrInfoBean) {
+      SolrInfoBean mBean = (SolrInfoBean) inst;
       String name = (inst instanceof SolrRequestHandler) ? pluginKey : mBean.getName();
       core.registerInfoBean(name, mBean);
     }
@@ -455,7 +455,7 @@ public class PluginBag<T> implements AutoCloseable {
     }
 
     public RuntimeLib(SolrCore core) {
-      coreContainer = core.getCoreDescriptor().getCoreContainer();
+      coreContainer = core.getCoreContainer();
     }
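
registerMBean() above now keys on SolrInfoBean, and registerInfoBean() (in
the SolrCore.java diff below) additionally wires any bean that also
implements SolrMetricProducer into the core's metric registry. A minimal
sketch of such a producer, matching the initializeMetrics(...) signature used
throughout this commit; the metric name and the null info-bean argument are
illustrative simplifications:

    import com.codahale.metrics.Counter;
    import org.apache.solr.metrics.SolrMetricManager;
    import org.apache.solr.metrics.SolrMetricProducer;

    public class MyComponentMetrics implements SolrMetricProducer {
      private Counter requests;

      @Override
      public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
        // Passing null instead of a SolrInfoBean keeps the sketch short;
        // real components would pass themselves, as SolrCore does below.
        requests = manager.counter(null, registry, "requests", scope);
      }

      public void onRequest() {
        if (requests != null) {
          requests.inc();
        }
      }
    }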
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/SolrConfig.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrConfig.java b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
index a244420..4e7ab48 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
@@ -276,18 +276,12 @@ public class SolrConfig extends Config implements MapSerializable {
     hashSetInverseLoadFactor = 1.0f / getFloat("//HashDocSet/@loadFactor", 0.75f);
     hashDocSetMaxSize = getInt("//HashDocSet/@maxSize", 3000);
 
-    httpCachingConfig = new HttpCachingConfig(this);
+    if (get("jmx", null) != null) {
+      log.warn("solrconfig.xml: <jmx> is no longer supported, use solr.xml:/metrics/reporter section instead");
+    }
 
-    Node jmx = getNode("jmx", false);
-    if (jmx != null) {
-      jmxConfig = new JmxConfiguration(true,
-          get("jmx/@agentId", null),
-          get("jmx/@serviceUrl", null),
-          get("jmx/@rootName", null));
+    httpCachingConfig = new HttpCachingConfig(this);
 
-    } else {
-      jmxConfig = new JmxConfiguration(false, null, null, null);
-    }
     maxWarmingSearchers = getInt("query/maxWarmingSearchers", 1);
     slowQueryThresholdMillis = getInt("query/slowQueryThresholdMillis", -1);
     for (SolrPluginInfo plugin : plugins) loadPluginInfo(plugin);
@@ -510,48 +504,12 @@ public class SolrConfig extends Config implements MapSerializable {
   protected String dataDir;
   public final int slowQueryThresholdMillis;  // threshold above which a query is considered slow
 
-  //JMX configuration
-  public final JmxConfiguration jmxConfig;
-
   private final HttpCachingConfig httpCachingConfig;
 
   public HttpCachingConfig getHttpCachingConfig() {
     return httpCachingConfig;
   }
 
-  public static class JmxConfiguration implements MapSerializable {
-    public boolean enabled = false;
-    public String agentId;
-    public String serviceUrl;
-    public String rootName;
-
-    public JmxConfiguration(boolean enabled,
-                            String agentId,
-                            String serviceUrl,
-                            String rootName) {
-      this.enabled = enabled;
-      this.agentId = agentId;
-      this.serviceUrl = serviceUrl;
-      this.rootName = rootName;
-
-      if (agentId != null && serviceUrl != null) {
-        throw new SolrException
-            (SolrException.ErrorCode.SERVER_ERROR,
-                "Incorrect JMX Configuration in solrconfig.xml, " +
-                    "both agentId and serviceUrl cannot be specified at the same time");
-      }
-
-    }
-
-    @Override
-    public Map<String, Object> toMap(Map<String, Object> map) {
-      map.put("agentId", agentId);
-      map.put("serviceUrl", serviceUrl);
-      map.put("rootName", rootName);
-      return map;
-    }
-  }
-
   public static class HttpCachingConfig implements MapSerializable {
 
     /**
@@ -858,7 +816,6 @@ public class SolrConfig extends Config implements MapSerializable {
     m.put("queryResultMaxDocsCached", queryResultMaxDocsCached);
     m.put("enableLazyFieldLoading", enableLazyFieldLoading);
     m.put("maxBooleanClauses", booleanQueryMaxClauseCount);
-    if (jmxConfig != null) result.put("jmx", jmxConfig);
     for (SolrPluginInfo plugin : plugins) {
       List<PluginInfo> infos = getPluginInfos(plugin.clazz.getName());
       if (infos == null || infos.isEmpty()) continue;
@@ -884,7 +841,6 @@ public class SolrConfig extends Config implements MapSerializable {
 
 
     addCacheConfig(m, filterCacheConfig, queryResultCacheConfig, documentCacheConfig, fieldValueCacheConfig);
-    if (jmxConfig != null) result.put("jmx", jmxConfig);
     m = new LinkedHashMap();
     result.put("requestDispatcher", m);
     m.put("handleSelect", handleSelect);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index a6ba2dc..b26fc2f 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -27,7 +27,6 @@ import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.lang.invoke.MethodHandles;
 import java.lang.reflect.Constructor;
-import java.net.URL;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.NoSuchFileException;
 import java.nio.file.Path;
@@ -58,6 +57,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.locks.ReentrantLock;
 
 import com.codahale.metrics.Counter;
+import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
 import com.google.common.collect.MapMaker;
 import org.apache.commons.io.FileUtils;
@@ -133,7 +133,7 @@ import org.apache.solr.schema.IndexSchemaFactory;
 import org.apache.solr.schema.ManagedIndexSchema;
 import org.apache.solr.schema.SimilarityFactory;
 import org.apache.solr.search.QParserPlugin;
-import org.apache.solr.search.SolrFieldCacheMBean;
+import org.apache.solr.search.SolrFieldCacheBean;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.search.ValueSourceParser;
 import org.apache.solr.search.stats.LocalStatsCache;
@@ -171,7 +171,7 @@ import static org.apache.solr.common.params.CommonParams.PATH;
 /**
  *
  */
-public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closeable {
+public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeable {
 
   public static final String version="1.0";
 
@@ -180,7 +180,6 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
 
   private String name;
   private String logid; // used to show what name is set
-  private CoreDescriptor coreDescriptor;
 
   private boolean isReloaded = false;
 
@@ -202,7 +201,7 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
   private final PluginBag<UpdateRequestProcessorFactory> updateProcessors = new PluginBag<>(UpdateRequestProcessorFactory.class, this, true);
   private final Map<String,UpdateRequestProcessorChain> updateProcessorChains;
   private final SolrCoreMetricManager coreMetricManager;
-  private final Map<String, SolrInfoMBean> infoRegistry;
+  private final Map<String, SolrInfoBean> infoRegistry = new ConcurrentHashMap<>();
   private final IndexDeletionPolicyWrapper solrDelPolicy;
   private final SolrSnapshotMetaDataManager snapshotMgr;
   private final DirectoryFactory directoryFactory;
@@ -221,6 +220,13 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
   private Counter newSearcherCounter;
   private Counter newSearcherMaxReachedCounter;
   private Counter newSearcherOtherErrorsCounter;
+  private final CoreContainer coreContainer;
+
+  private Set<String> metricNames = new HashSet<>();
+
+  public Set<String> getMetricNames() {
+    return metricNames;
+  }
 
   public Date getStartTimeStamp() { return startTime; }
 
@@ -424,10 +430,8 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
   }
 
   public void setName(String v) {
-    String oldName = this.name;
     this.name = v;
     this.logid = (v==null)?"":("["+v+"] ");
-    this.coreDescriptor = new CoreDescriptor(v, this.coreDescriptor);
     if (coreMetricManager != null) {
       coreMetricManager.afterCoreSetName();
     }
@@ -448,14 +452,14 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
   }
 
   /**
-   * Returns a Map of name vs SolrInfoMBean objects. The returned map is an instance of
+   * Returns a Map of name vs SolrInfoBean objects. The returned map is an instance of
    * a ConcurrentHashMap and therefore no synchronization is needed for putting, removing
    * or iterating over it.
    *
-   * @return the Info Registry map which contains SolrInfoMBean objects keyed by name
+   * @return the Info Registry map which contains SolrInfoBean objects keyed by name
    * @since solr 1.3
    */
-  public Map<String, SolrInfoMBean> getInfoRegistry() {
+  public Map<String, SolrInfoBean> getInfoRegistry() {
     return infoRegistry;
   }
 
@@ -636,9 +640,9 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
       boolean success = false;
       SolrCore core = null;
       try {
-        CoreDescriptor cd = new CoreDescriptor(coreDescriptor.getName(), coreDescriptor);
+        CoreDescriptor cd = new CoreDescriptor(name, getCoreDescriptor());
         cd.loadExtraProperties(); //Reload the extra properties
-        core = new SolrCore(getName(), getDataDir(), coreConfig.getSolrConfig(),
+        core = new SolrCore(coreContainer, getName(), getDataDir(), coreConfig.getSolrConfig(),
             coreConfig.getIndexSchema(), coreConfig.getProperties(),
             cd, updateHandler, solrDelPolicy, currentCore, true);
         
@@ -658,7 +662,7 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
   }
 
   private DirectoryFactory initDirectoryFactory() {
-    return DirectoryFactory.loadDirectoryFactory(solrConfig, getCoreDescriptor().getCoreContainer(), coreMetricManager.getRegistryName());
+    return DirectoryFactory.loadDirectoryFactory(solrConfig, coreContainer, coreMetricManager.getRegistryName());
   }
 
   private RecoveryStrategy.Builder initRecoveryStrategyBuilder() {
@@ -845,12 +849,16 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
     return createReloadedUpdateHandler(className, "Update Handler", updateHandler);
   }
 
-  public SolrCore(CoreDescriptor cd, ConfigSet coreConfig) {
-    this(cd.getName(), null, coreConfig.getSolrConfig(), coreConfig.getIndexSchema(), coreConfig.getProperties(),
+  public SolrCore(CoreContainer coreContainer, CoreDescriptor cd, ConfigSet coreConfig) {
+    this(coreContainer, cd.getName(), null, coreConfig.getSolrConfig(), coreConfig.getIndexSchema(), coreConfig.getProperties(),
         cd, null, null, null, false);
   }
 
-  
+  public CoreContainer getCoreContainer() {
+    return coreContainer;
+  }
+
+
   /**
    * Creates a new core and registers it in the list of cores. If a core with the
    * same name already exists, it will be stopped and replaced by this one.
@@ -864,14 +872,18 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
    *
    * @since solr 1.3
    */
-  public SolrCore(String name, String dataDir, SolrConfig config,
-      IndexSchema schema, NamedList configSetProperties,
-      CoreDescriptor coreDescriptor, UpdateHandler updateHandler,
-      IndexDeletionPolicyWrapper delPolicy, SolrCore prev, boolean reload) {
+  public SolrCore(CoreContainer coreContainer, String name, String dataDir, SolrConfig config,
+                  IndexSchema schema, NamedList configSetProperties,
+                  CoreDescriptor coreDescriptor, UpdateHandler updateHandler,
+                  IndexDeletionPolicyWrapper delPolicy, SolrCore prev, boolean reload) {
+
+    this.coreContainer = coreContainer;
     
     assert ObjectReleaseTracker.track(searcherExecutor); // ensure that in unclean shutdown tests we still close this
-    
-    this.coreDescriptor = Objects.requireNonNull(coreDescriptor, "coreDescriptor cannot be null");
+
+    CoreDescriptor cd = Objects.requireNonNull(coreDescriptor, "coreDescriptor cannot be null");
+    coreContainer.solrCores.addCoreDescriptor(cd);
+
     setName(name);
     MDCLoggingContext.setCore(this);
     
@@ -900,14 +912,17 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
 
     checkVersionFieldExistsInSchema(schema, coreDescriptor);
 
-    SolrMetricManager metricManager = this.coreDescriptor.getCoreContainer().getMetricManager();
+    SolrMetricManager metricManager = coreContainer.getMetricManager();
 
     // initialize searcher-related metrics
     initializeMetrics(metricManager, coreMetricManager.getRegistryName(), null);
 
-    // Initialize JMX
-    this.infoRegistry = initInfoRegistry(name, config);
-    infoRegistry.put("fieldCache", new SolrFieldCacheMBean());
+    SolrFieldCacheBean solrFieldCacheBean = new SolrFieldCacheBean();
+    // this is registered at the CONTAINER level because it's not core-specific - for now we
+    // also register it here for back-compat
+    solrFieldCacheBean.initializeMetrics(metricManager, coreMetricManager.getRegistryName(), "core");
+    infoRegistry.put("fieldCache", solrFieldCacheBean);
+
 
     initSchema(config, schema);
 
@@ -998,15 +1013,9 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
     // from the core.
     resourceLoader.inform(infoRegistry);
 
-    // Allow the directory factory to register MBeans as well
-    for (SolrInfoMBean bean : directoryFactory.offerMBeans()) {
-      log.debug("Registering JMX bean [{}] from directory factory.", bean.getName());
-      // Not worried about concurrency, so no reason to use putIfAbsent
-      if (infoRegistry.containsKey(bean.getName())){
-        log.debug("Ignoring JMX bean [{}] due to name conflict.", bean.getName());
-      } else {
-        infoRegistry.put(bean.getName(), bean);
-      }
+    // Allow the directory factory to report metrics
+    if (directoryFactory instanceof SolrMetricProducer) {
+      ((SolrMetricProducer)directoryFactory).initializeMetrics(metricManager, coreMetricManager.getRegistryName(), "directoryFactory");
     }
 
     // seed version buckets with max from index during core initialization ... requires a searcher!
@@ -1040,15 +1049,15 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
 
   /** Set UpdateLog to buffer updates if the slice is in construction. */
   private void bufferUpdatesIfConstructing(CoreDescriptor coreDescriptor) {
-    final CoreContainer cc = coreDescriptor.getCoreContainer();
-    if (cc != null && cc.isZooKeeperAware()) {
+    
+    if (coreContainer != null && coreContainer.isZooKeeperAware()) {
       if (reqHandlers.get("/get") == null) {
         log.warn("WARNING: RealTimeGetHandler is not registered at /get. " +
             "SolrCloud will always use full index replication instead of the more efficient PeerSync method.");
       }
 
       // ZK pre-register would have already happened so we read slice properties now
-      final ClusterState clusterState = cc.getZkController().getClusterState();
+      final ClusterState clusterState = coreContainer.getZkController().getClusterState();
       final DocCollection collection = clusterState.getCollection(coreDescriptor.getCloudDescriptor().getCollectionName());
       final Slice slice = collection.getSlice(coreDescriptor.getCloudDescriptor().getShardId());
       if (slice.getState() == Slice.State.CONSTRUCTION) {
@@ -1126,34 +1135,45 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
 
   @Override
   public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
-    newSearcherCounter = manager.counter(registry, "new", Category.SEARCHER.toString());
-    newSearcherTimer = manager.timer(registry, "time", Category.SEARCHER.toString(), "new");
-    newSearcherWarmupTimer = manager.timer(registry, "warmup", Category.SEARCHER.toString(), "new");
-    newSearcherMaxReachedCounter = manager.counter(registry, "maxReached", Category.SEARCHER.toString(), "new");
-    newSearcherOtherErrorsCounter = manager.counter(registry, "errors", Category.SEARCHER.toString(), "new");
-
-    manager.registerGauge(registry, () -> name == null ? "(null)" : name, true, "coreName", Category.CORE.toString());
-    manager.registerGauge(registry, () -> startTime, true, "startTime", Category.CORE.toString());
-    manager.registerGauge(registry, () -> getOpenCount(), true, "refCount", Category.CORE.toString());
-    manager.registerGauge(registry, () -> resourceLoader.getInstancePath().toString(), true, "instanceDir", Category.CORE.toString());
-    manager.registerGauge(registry, () -> getIndexDir(), true, "indexDir", Category.CORE.toString());
-    manager.registerGauge(registry, () -> getIndexSize(), true, "sizeInBytes", Category.INDEX.toString());
-    manager.registerGauge(registry, () -> NumberUtils.readableSize(getIndexSize()), true, "size", Category.INDEX.toString());
-    manager.registerGauge(registry, () -> coreDescriptor.getCoreContainer().getCoreNames(this), true, "aliases", Category.CORE.toString());
+    newSearcherCounter = manager.counter(this, registry, "new", Category.SEARCHER.toString());
+    newSearcherTimer = manager.timer(this, registry, "time", Category.SEARCHER.toString(), "new");
+    newSearcherWarmupTimer = manager.timer(this, registry, "warmup", Category.SEARCHER.toString(), "new");
+    newSearcherMaxReachedCounter = manager.counter(this, registry, "maxReached", Category.SEARCHER.toString(), "new");
+    newSearcherOtherErrorsCounter = manager.counter(this, registry, "errors", Category.SEARCHER.toString(), "new");
+
+    manager.registerGauge(this, registry, () -> name == null ? "(null)" : name, true, "coreName", Category.CORE.toString());
+    manager.registerGauge(this, registry, () -> startTime, true, "startTime", Category.CORE.toString());
+    manager.registerGauge(this, registry, () -> getOpenCount(), true, "refCount", Category.CORE.toString());
+    manager.registerGauge(this, registry, () -> resourceLoader.getInstancePath().toString(), true, "instanceDir", Category.CORE.toString());
+    manager.registerGauge(this, registry, () -> getIndexDir(), true, "indexDir", Category.CORE.toString());
+    manager.registerGauge(this, registry, () -> getIndexSize(), true, "sizeInBytes", Category.INDEX.toString());
+    manager.registerGauge(this, registry, () -> NumberUtils.readableSize(getIndexSize()), true, "size", Category.INDEX.toString());
+    if (coreContainer != null) {
+      manager.registerGauge(this, registry, () -> coreContainer.getCoreNames(this), true, "aliases", Category.CORE.toString());
+      final CloudDescriptor cd = getCoreDescriptor().getCloudDescriptor();
+      if (cd != null) {
+        manager.registerGauge(this, registry, () -> {
+          if (cd.getCollectionName() != null) {
+            return cd.getCollectionName();
+          } else {
+            return "_notset_";
+          }
+        }, true, "collection", Category.CORE.toString());
+
+        manager.registerGauge(this, registry, () -> {
+          if (cd.getShardId() != null) {
+            return cd.getShardId();
+          } else {
+            return "_auto_";
+          }
+        }, true, "shard", Category.CORE.toString());
+      }
+    }
     // initialize disk total / free metrics
     Path dataDirPath = Paths.get(dataDir);
     File dataDirFile = dataDirPath.toFile();
-    manager.registerGauge(registry, () -> dataDirFile.getTotalSpace(), true, "totalSpace", Category.CORE.toString(), "fs");
-    manager.registerGauge(registry, () -> dataDirFile.getUsableSpace(), true, "usableSpace", Category.CORE.toString(), "fs");
-  }
-
-  private Map<String,SolrInfoMBean> initInfoRegistry(String name, SolrConfig config) {
-    if (config.jmxConfig.enabled) {
-      return new JmxMonitoredMap<String, SolrInfoMBean>(name, coreMetricManager.getRegistryName(), String.valueOf(this.hashCode()), config.jmxConfig);
-    } else  {
-      log.debug("JMX monitoring not detected for core: " + name);
-      return new ConcurrentHashMap<>();
-    }
+    manager.registerGauge(this, registry, () -> dataDirFile.getTotalSpace(), true, "totalSpace", Category.CORE.toString(), "fs");
+    manager.registerGauge(this, registry, () -> dataDirFile.getUsableSpace(), true, "usableSpace", Category.CORE.toString(), "fs");
   }
 
   private void checkVersionFieldExistsInSchema(IndexSchema schema, CoreDescriptor coreDescriptor) {
@@ -2685,6 +2705,9 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
     for (PluginInfo info : pluginInfos) {
       T o = createInitInstance(info,type, type.getSimpleName(), defClassName);
       registry.put(info.name, o);
+      if (o instanceof SolrMetricProducer) {
+        coreMetricManager.registerMetricProducer(type.getSimpleName() + "." + info.name, (SolrMetricProducer)o);
+      }
       if(info.isDefault()){
         def = o;
       }
@@ -2692,6 +2715,12 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
     return def;
   }
 
+  public void initDefaultPlugin(Object plugin, Class type) {
+    if (plugin instanceof SolrMetricProducer) {
+      coreMetricManager.registerMetricProducer(type.getSimpleName() + ".default", (SolrMetricProducer)plugin);
+    }
+  }
+
   /**For a given List of PluginInfo return the instances as a List
    * @param defClassName The default classname if PluginInfo#className == null
    * @return The instances initialized
@@ -2746,7 +2775,7 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
     if (initArgs == null)
       initArgs = new NamedList<>();
 
-    String collection = coreDescriptor.getCollectionName();
+    String collection = getCoreDescriptor().getCollectionName();
     StorageIO storageIO =
         ManagedResourceStorage.newStorageIO(collection, resourceLoader, initArgs);
     mgr.init(resourceLoader, initArgs, storageIO);
@@ -2755,7 +2784,7 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
   }
 
   public CoreDescriptor getCoreDescriptor() {
-    return coreDescriptor;
+    return coreContainer.getCoreDescriptor(name);
   }
 
   public IndexDeletionPolicyWrapper getDeletionPolicy(){
@@ -2775,15 +2804,10 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
   }
 
   /////////////////////////////////////////////////////////////////////
-  // SolrInfoMBean stuff: Statistics and Module Info
+  // SolrInfoBean stuff: Statistics and Module Info
   /////////////////////////////////////////////////////////////////////
 
   @Override
-  public String getVersion() {
-    return SolrCore.version;
-  }
-
-  @Override
   public String getDescription() {
     return "SolrCore";
   }
@@ -2794,55 +2818,15 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
   }
 
   @Override
-  public String getSource() {
-    return null;
-  }
-
-  @Override
-  public URL[] getDocs() {
-    return null;
-  }
-
-  @Override
-  public NamedList getStatistics() {
-    NamedList<Object> lst = new SimpleOrderedMap<>(8);
-    lst.add("coreName", name==null ? "(null)" : name);
-    lst.add("startTime", startTime);
-    lst.add("refCount", getOpenCount());
-    lst.add("instanceDir", resourceLoader.getInstancePath());
-    lst.add("indexDir", getIndexDir());
-    long size = getIndexSize();
-    lst.add("sizeInBytes", size);
-    lst.add("size", NumberUtils.readableSize(size));
-
-    CoreDescriptor cd = getCoreDescriptor();
-    if (cd != null) {
-      if (null != cd && cd.getCoreContainer() != null) {
-        lst.add("aliases", getCoreDescriptor().getCoreContainer().getCoreNames(this));
-      }
-      CloudDescriptor cloudDesc = cd.getCloudDescriptor();
-      if (cloudDesc != null) {
-        String collection = cloudDesc.getCollectionName();
-        if (collection == null) {
-          collection = "_notset_";
-        }
-        lst.add("collection", collection);
-        String shard = cloudDesc.getShardId();
-        if (shard == null) {
-          shard = "_auto_";
-        }
-        lst.add("shard", shard);
-      }
-    }
-
-    return lst;
+  public MetricRegistry getMetricRegistry() {
+    return coreMetricManager.getRegistry();
   }
 
   public Codec getCodec() {
     return codec;
   }
 
-  public void unloadOnClose(boolean deleteIndexDir, boolean deleteDataDir, boolean deleteInstanceDir) {
+  public void unloadOnClose(final CoreDescriptor desc, boolean deleteIndexDir, boolean deleteDataDir, boolean deleteInstanceDir) {
     if (deleteIndexDir) {
       try {
         directoryFactory.remove(getIndexDir());
@@ -2865,13 +2849,12 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
 
         @Override
         public void postClose(SolrCore core) {
-          CoreDescriptor cd = core.getCoreDescriptor();
-          if (cd != null) {
+          if (desc != null) {
             try {
-              FileUtils.deleteDirectory(cd.getInstanceDir().toFile());
+              FileUtils.deleteDirectory(desc.getInstanceDir().toFile());
             } catch (IOException e) {
               SolrException.log(log, "Failed to delete instance dir for core:"
-                  + core.getName() + " dir:" + cd.getInstanceDir());
+                  + core.getName() + " dir:" + desc.getInstanceDir());
             }
           }
         }
@@ -2930,7 +2913,7 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
 
   public static Runnable getConfListener(SolrCore core, ZkSolrResourceLoader zkSolrResourceLoader) {
     final String coreName = core.getName();
-    final CoreContainer cc = core.getCoreDescriptor().getCoreContainer();
+    final CoreContainer cc = core.getCoreContainer();
     final String overlayPath = zkSolrResourceLoader.getConfigSetZkPath() + "/" + ConfigOverlay.RESOURCE_NAME;
     final String solrConfigPath = zkSolrResourceLoader.getConfigSetZkPath() + "/" + core.getSolrConfig().getName();
     String schemaRes = null;
@@ -2983,11 +2966,11 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
     };
   }
 
-  public void registerInfoBean(String name, SolrInfoMBean solrInfoMBean) {
-    infoRegistry.put(name, solrInfoMBean);
+  public void registerInfoBean(String name, SolrInfoBean solrInfoBean) {
+    infoRegistry.put(name, solrInfoBean);
 
-    if (solrInfoMBean instanceof SolrMetricProducer) {
-      SolrMetricProducer producer = (SolrMetricProducer) solrInfoMBean;
+    if (solrInfoBean instanceof SolrMetricProducer) {
+      SolrMetricProducer producer = (SolrMetricProducer) solrInfoBean;
       coreMetricManager.registerMetricProducer(name, producer);
     }
   }
@@ -3064,7 +3047,6 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
     if (!BlobRepository.BLOB_KEY_PATTERN_CHECKER.matcher(key).matches()) {
       throw new IllegalArgumentException("invalid key format, must end in /N where N is the version number");
     }
-    CoreContainer coreContainer = getCoreDescriptor().getCoreContainer();
     // define the blob
     BlobRepository.BlobContentRef blobRef = coreContainer.getBlobRepository().getBlobIncRef(key, decoder);
     addCloseHook(new CloseHook() {
@@ -3074,7 +3056,7 @@ public final class SolrCore implements SolrInfoMBean, SolrMetricProducer, Closea
 
       @Override
       public void postClose(SolrCore core) {
-        core.getCoreDescriptor().getCoreContainer().getBlobRepository().decrementBlobRefCount(blobRef);
+        coreContainer.getBlobRepository().decrementBlobRefCount(blobRef);
       }
     });
     return blobRef;
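
With SolrCore implementing SolrInfoBean, the values that getStatistics() used
to assemble by hand (coreName, startTime, refCount, index size, collection,
shard, ...) are now gauges in the core's metric registry. A minimal sketch of
reading them back through the Dropwizard API, assuming the caller already
holds an open core:

    import java.util.Map;

    import com.codahale.metrics.Gauge;
    import com.codahale.metrics.MetricRegistry;
    import org.apache.solr.core.SolrCore;

    public class CoreMetricsDump {
      static void dump(SolrCore core) {
        MetricRegistry registry = core.getMetricRegistry(); // added in this diff
        for (Map.Entry<String, Gauge> e : registry.getGauges().entrySet()) {
          // e.g. CORE.coreName, CORE.startTime, INDEX.sizeInBytes (names assumed)
          System.out.println(e.getKey() + " = " + e.getValue().getValue());
        }
      }
    }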


[08/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/TestPointFields.java b/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
index 36e8c10..02d2ac2 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
@@ -16,16 +16,47 @@
  */
 package org.apache.solr.schema;
 
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
 import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
 import java.util.Locale;
 import java.util.Set;
+import java.util.SortedSet;
 import java.util.TreeSet;
 
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.document.StoredField;
+import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.PointValues;
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexOrDocValuesQuery;
 import org.apache.lucene.search.PointRangeQuery;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.index.SlowCompositeReaderWrapper;
+import org.apache.solr.schema.IndexSchema.DynamicField;
+import org.apache.solr.search.SolrIndexSearcher;
+import org.apache.solr.search.SolrQueryParser;
 import org.apache.solr.util.DateMathParser;
+import org.apache.solr.util.RefCounted;
 import org.junit.After;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -57,17 +88,24 @@ public class TestPointFields extends SolrTestCaseJ4 {
   public void testIntPointFieldExactQuery() throws Exception {
     doTestIntPointFieldExactQuery("number_p_i", false);
     doTestIntPointFieldExactQuery("number_p_i_mv", false);
+    doTestIntPointFieldExactQuery("number_p_i_dv", false);
+    doTestIntPointFieldExactQuery("number_p_i_mv_dv", false);
     doTestIntPointFieldExactQuery("number_p_i_ni_dv", false);
     doTestIntPointFieldExactQuery("number_p_i_ni_ns_dv", false);
     doTestIntPointFieldExactQuery("number_p_i_ni_mv_dv", false);
   }
   
   @Test
+  public void testIntPointFieldNonSearchableExactQuery() throws Exception {
+    doTestIntPointFieldExactQuery("number_p_i_ni", false, false);
+    doTestIntPointFieldExactQuery("number_p_i_ni_ns", false, false);
+  }
+  
+  @Test
   public void testIntPointFieldReturn() throws Exception {
     testPointFieldReturn("number_p_i", "int", new String[]{"0", "-1", "2", "3", "43", "52", "-60", "74", "80", "99"});
-    clearIndex();
-    assertU(commit());
     testPointFieldReturn("number_p_i_dv_ns", "int", new String[]{"0", "-1", "2", "3", "43", "52", "-60", "74", "80", "99"});
+    testPointFieldReturn("number_p_i_ni", "int", new String[]{"0", "-1", "2", "3", "43", "52", "-60", "74", "80", "99"});
   }
   
   @Test
@@ -78,8 +116,43 @@ public class TestPointFields extends SolrTestCaseJ4 {
   }
   
   @Test
-  public void testIntPointFieldSort() throws Exception {
-    doTestPointFieldSort("number_p_i", "number_p_i_dv", new String[]{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"});
+  public void testIntPointFieldNonSearchableRangeQuery() throws Exception {
+    doTestPointFieldNonSearchableRangeQuery("number_p_i_ni", "42");
+    doTestPointFieldNonSearchableRangeQuery("number_p_i_ni_ns", "42");
+    doTestPointFieldNonSearchableRangeQuery("number_p_i_ni_ns_mv", "42", "666");
+  }
+  
+  @Test
+  public void testIntPointFieldSortAndFunction() throws Exception {
+
+    final SortedSet<String> regexToTest = dynFieldRegexesForType(IntPointField.class);
+    final String[] sequential = new String[]{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
+    
+    for (String r : Arrays.asList("*_p_i", "*_p_i_dv", "*_p_i_dv_ns", "*_p_i_ni_dv",
+                                  "*_p_i_ni_dv_ns", "*_p_i_ni_ns_dv")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSort(r.replace("*","number"), sequential);
+      // TODO: test some randomly generated (then sorted) arrays (with dups and/or missing values)
+
+      doTestIntPointFunctionQuery(r.replace("*","number"), "int");
+    }
+    
+    for (String r : Arrays.asList("*_p_i_ni", "*_p_i_ni_ns")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSortError(r.replace("*","number"), "w/o docValues", "42");
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "w/o docValues", "42");
+    }
+    
+    for (String r : Arrays.asList("*_p_i_mv", "*_p_i_ni_mv", "*_p_i_ni_mv_dv", "*_p_i_ni_dv_ns_mv",
+                                  "*_p_i_ni_ns_mv", "*_p_i_dv_ns_mv", "*_p_i_mv_dv")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSortError(r.replace("*","number"), "multivalued", "42");
+      doTestPointFieldSortError(r.replace("*","number"), "multivalued", "42", "666");
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "multivalued", "42");
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "multivalued", "42", "666");
+   }
+    
+    assertEquals("Missing types in the test", Collections.<String>emptySet(), regexToTest);
   }
   
   @Test
@@ -91,13 +164,6 @@ public class TestPointFields extends SolrTestCaseJ4 {
   public void testIntPointFieldRangeFacet() throws Exception {
     doTestIntPointFieldRangeFacet("number_p_i_dv", "number_p_i");
   }
-  
-  
-  @Test
-  public void testIntPointFunctionQuery() throws Exception {
-    doTestIntPointFunctionQuery("number_p_i_dv", "number_p_i", "int");
-  }
-
 
   @Test
   public void testIntPointStats() throws Exception {
@@ -112,6 +178,12 @@ public class TestPointFields extends SolrTestCaseJ4 {
     testPointFieldMultiValuedExactQuery("number_p_i_mv", getSequentialStringArrayWithInts(20));
     testPointFieldMultiValuedExactQuery("number_p_i_ni_mv_dv", getSequentialStringArrayWithInts(20));
   }
+
+  @Test
+  public void testIntPointFieldMultiValuedNonSearchableExactQuery() throws Exception {
+    testPointFieldMultiValuedExactQuery("number_p_i_ni_mv", getSequentialStringArrayWithInts(20), false);
+    testPointFieldMultiValuedExactQuery("number_p_i_ni_ns_mv", getSequentialStringArrayWithInts(20), false);
+  }
   
   @Test
   public void testIntPointFieldMultiValuedReturn() throws Exception {
@@ -127,6 +199,12 @@ public class TestPointFields extends SolrTestCaseJ4 {
     testPointFieldMultiValuedRangeQuery("number_p_i_mv_dv", "int", getSequentialStringArrayWithInts(20));
   }
   
+  @Test
+  public void testIntPointFieldNotIndexed() throws Exception {
+    doTestFieldNotIndexed("number_p_i_ni", getSequentialStringArrayWithInts(10));
+    doTestFieldNotIndexed("number_p_i_ni_mv", getSequentialStringArrayWithInts(10));
+  }
+  
   //TODO MV SORT?
   @Test
   public void testIntPointFieldMultiValuedFacetField() throws Exception {
@@ -166,9 +244,9 @@ public class TestPointFields extends SolrTestCaseJ4 {
   
   @Test
   public void testIntPointSetQuery() throws Exception {
-    doTestSetQueries("number_p_i", getRandomStringArrayWithInts(10, false), false);
-    doTestSetQueries("number_p_i_mv", getRandomStringArrayWithInts(10, false), true);
-    doTestSetQueries("number_p_i_ni_dv", getRandomStringArrayWithInts(10, false), false);
+    doTestSetQueries("number_p_i", getRandomStringArrayWithInts(20, false), false);
+    doTestSetQueries("number_p_i_mv", getRandomStringArrayWithInts(20, false), true);
+    doTestSetQueries("number_p_i_ni_dv", getRandomStringArrayWithInts(20, false), false);
   }
   
   // DoublePointField
@@ -178,19 +256,24 @@ public class TestPointFields extends SolrTestCaseJ4 {
     doTestFloatPointFieldExactQuery("number_d");
     doTestFloatPointFieldExactQuery("number_p_d");
     doTestFloatPointFieldExactQuery("number_p_d_mv");
+    doTestFloatPointFieldExactQuery("number_p_d_dv");
+    doTestFloatPointFieldExactQuery("number_p_d_mv_dv");
     doTestFloatPointFieldExactQuery("number_p_d_ni_dv");
     doTestFloatPointFieldExactQuery("number_p_d_ni_ns_dv");
+    doTestFloatPointFieldExactQuery("number_p_d_ni_dv_ns");
     doTestFloatPointFieldExactQuery("number_p_d_ni_mv_dv");
   }
   
   @Test
+  public void testDoublePointFieldNonSearchableExactQuery() throws Exception {
+    doTestFloatPointFieldExactQuery("number_p_d_ni", false);
+    doTestFloatPointFieldExactQuery("number_p_d_ni_ns", false);
+  }
+ 
+  @Test
   public void testDoublePointFieldReturn() throws Exception {
     testPointFieldReturn("number_p_d", "double", new String[]{"0.0", "1.2", "2.5", "3.02", "0.43", "5.2", "6.01", "74.0", "80.0", "9.9"});
-    clearIndex();
-    assertU(commit());
     testPointFieldReturn("number_p_d_dv_ns", "double", new String[]{"0.0", "1.2", "2.5", "3.02", "0.43", "5.2", "6.01", "74.0", "80.0", "9.9"});
-    clearIndex();
-    assertU(commit());
     String[] arr = new String[atLeast(10)];
     for (int i = 0; i < arr.length; i++) {
       double rand = random().nextDouble() * 10;
@@ -207,9 +290,48 @@ public class TestPointFields extends SolrTestCaseJ4 {
   }
   
   @Test
-  public void testDoublePointFieldSort() throws Exception {
-    String[] arr = getRandomStringArrayWithDoubles(10, true);
-    doTestPointFieldSort("number_p_d", "number_p_d_dv", arr);
+  public void testDoubleFieldNonSearchableRangeQuery() throws Exception {
+    doTestPointFieldNonSearchableRangeQuery("number_p_d_ni", "42.3");
+    doTestPointFieldNonSearchableRangeQuery("number_p_d_ni_ns", "42.3");
+    doTestPointFieldNonSearchableRangeQuery("number_p_d_ni_ns_mv", "42.3", "-66.6");
+  }
+  
+  
+  @Test
+  public void testDoublePointFieldSortAndFunction() throws Exception {
+    final SortedSet<String> regexToTest = dynFieldRegexesForType(DoublePointField.class);
+    final String[] sequential = new String[]{"0.0", "1.0", "2.0", "3.0", "4.0", "5.0", "6.0", "7.0", "8.0", "9.0"};
+    final String[] randstrs = getRandomStringArrayWithDoubles(10, true);
+
+    for (String r : Arrays.asList("*_p_d", "*_p_d_dv", "*_p_d_dv_ns", "*_p_d_ni_dv",
+                                  "*_p_d_ni_dv_ns", "*_p_d_ni_ns_dv")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSort(r.replace("*","number"), sequential);
+      doTestPointFieldSort(r.replace("*","number"), randstrs);
+      // TODO: test some randomly generated (then sorted) arrays (with dups and/or missing values)
+
+      doTestFloatPointFunctionQuery(r.replace("*","number"), "double");
+    }
+    
+    for (String r : Arrays.asList("*_p_d_ni", "*_p_d_ni_ns")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSortError(r.replace("*","number"), "w/o docValues", "42.34");
+      
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "w/o docValues", "42.34");
+    }
+    
+    for (String r : Arrays.asList("*_p_d_mv", "*_p_d_ni_mv", "*_p_d_ni_mv_dv", "*_p_d_ni_dv_ns_mv",
+                                  "*_p_d_ni_ns_mv", "*_p_d_dv_ns_mv", "*_p_d_mv_dv")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSortError(r.replace("*","number"), "multivalued", "42.34");
+      doTestPointFieldSortError(r.replace("*","number"), "multivalued", "42.34", "66.6");
+      
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "multivalued", "42.34");
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "multivalued", "42.34", "66.6");
+    }
+    
+    assertEquals("Missing types in the test", Collections.<String>emptySet(), regexToTest);
+    
   }
   
   @Test
@@ -226,11 +348,6 @@ public class TestPointFields extends SolrTestCaseJ4 {
   }
 
   @Test
-  public void testDoublePointFunctionQuery() throws Exception {
-    doTestFloatPointFunctionQuery("number_p_d_dv", "number_p_d", "double");
-  }
-  
-  @Test
   public void testDoublePointStats() throws Exception {
     testPointStats("number_p_d", "number_p_d_dv", new String[]{"-10.0", "1.1", "2.2", "3.3", "4.4", "5.5", "6.6", "7.7", "8.8", "9.9"},
         -10.0D, 9.9D, "10", "1", 1E-10D);
@@ -245,6 +362,12 @@ public class TestPointFields extends SolrTestCaseJ4 {
   }
   
   @Test
+  public void testDoublePointFieldMultiValuedNonSearchableExactQuery() throws Exception {
+    testPointFieldMultiValuedExactQuery("number_p_d_ni_mv", getRandomStringArrayWithDoubles(20, false), false);
+    testPointFieldMultiValuedExactQuery("number_p_d_ni_ns_mv", getRandomStringArrayWithDoubles(20, false), false);
+  }
+  
+  @Test
   public void testDoublePointFieldMultiValuedReturn() throws Exception {
     testPointFieldMultiValuedReturn("number_p_d_mv", "double", getSequentialStringArrayWithDoubles(20));
     testPointFieldMultiValuedReturn("number_p_d_ni_mv_dv", "double", getSequentialStringArrayWithDoubles(20));
@@ -295,6 +418,12 @@ public class TestPointFields extends SolrTestCaseJ4 {
     testMultiValuedFloatPointFieldsAtomicUpdates("number_p_d_dv_ns_mv", "double");
   }
   
+  @Test
+  public void testDoublePointFieldNotIndexed() throws Exception {
+    doTestFieldNotIndexed("number_p_d_ni", getSequentialStringArrayWithDoubles(10));
+    doTestFieldNotIndexed("number_p_d_ni_mv", getSequentialStringArrayWithDoubles(10));
+  }
+  
   
   private void doTestFloatPointFieldsAtomicUpdates(String field, String type) throws Exception {
     assertU(adoc(sdoc("id", "1", field, "1.1234")));
@@ -331,9 +460,9 @@ public class TestPointFields extends SolrTestCaseJ4 {
   
   @Test
   public void testDoublePointSetQuery() throws Exception {
-    doTestSetQueries("number_p_d", getRandomStringArrayWithDoubles(10, false), false);
-    doTestSetQueries("number_p_d_mv", getRandomStringArrayWithDoubles(10, false), true);
-    doTestSetQueries("number_p_d_ni_dv", getRandomStringArrayWithDoubles(10, false), false);
+    doTestSetQueries("number_p_d", getRandomStringArrayWithDoubles(20, false), false);
+    doTestSetQueries("number_p_d_mv", getRandomStringArrayWithDoubles(20, false), true);
+    doTestSetQueries("number_p_d_ni_dv", getRandomStringArrayWithDoubles(20, false), false);
   }
   
   // Float
@@ -342,19 +471,24 @@ public class TestPointFields extends SolrTestCaseJ4 {
   public void testFloatPointFieldExactQuery() throws Exception {
     doTestFloatPointFieldExactQuery("number_p_f");
     doTestFloatPointFieldExactQuery("number_p_f_mv");
+    doTestFloatPointFieldExactQuery("number_p_f_dv");
+    doTestFloatPointFieldExactQuery("number_p_f_mv_dv");
     doTestFloatPointFieldExactQuery("number_p_f_ni_dv");
     doTestFloatPointFieldExactQuery("number_p_f_ni_ns_dv");
+    doTestFloatPointFieldExactQuery("number_p_f_ni_dv_ns");
     doTestFloatPointFieldExactQuery("number_p_f_ni_mv_dv");
   }
   
   @Test
+  public void testFloatPointFieldNonSearchableExactQuery() throws Exception {
+    doTestFloatPointFieldExactQuery("number_p_f_ni", false);
+    doTestFloatPointFieldExactQuery("number_p_f_ni_ns", false);
+  }
+  
+  @Test
   public void testFloatPointFieldReturn() throws Exception {
     testPointFieldReturn("number_p_f", "float", new String[]{"0.0", "-1.2", "2.5", "3.02", "0.43", "5.2", "6.01", "74.0", "80.0", "9.9"});
-    clearIndex();
-    assertU(commit());
     testPointFieldReturn("number_p_f_dv_ns", "float", new String[]{"0.0", "-1.2", "2.5", "3.02", "0.43", "5.2", "6.01", "74.0", "80.0", "9.9"});
-    clearIndex();
-    assertU(commit());
     String[] arr = new String[atLeast(10)];
     for (int i = 0; i < arr.length; i++) {
       float rand = random().nextFloat() * 10;
@@ -371,9 +505,47 @@ public class TestPointFields extends SolrTestCaseJ4 {
   }
   
   @Test
-  public void testFloatPointFieldSort() throws Exception {
-    String[] arr = getRandomStringArrayWithFloats(10, true);
-    doTestPointFieldSort("number_p_f", "number_p_f_dv", arr);
+  public void testFloatPointFieldNonSearchableRangeQuery() throws Exception {
+    doTestPointFieldNonSearchableRangeQuery("number_p_f_ni", "42.3");
+    doTestPointFieldNonSearchableRangeQuery("number_p_f_ni_ns", "42.3");
+    doTestPointFieldNonSearchableRangeQuery("number_p_f_ni_ns_mv", "42.3", "-66.6");
+  }
+  
+  @Test
+  public void testFloatPointFieldSortAndFunction() throws Exception {
+    final SortedSet<String> regexToTest = dynFieldRegexesForType(FloatPointField.class);
+    final String[] sequential = new String[]{"0.0", "1.0", "2.0", "3.0", "4.0", "5.0", "6.0", "7.0", "8.0", "9.0"};
+    final String[] randstrs = getRandomStringArrayWithFloats(10, true);
+    
+    for (String r : Arrays.asList("*_p_f", "*_p_f_dv", "*_p_f_dv_ns", "*_p_f_ni_dv",
+                                  "*_p_f_ni_dv_ns", "*_p_f_ni_ns_dv")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSort(r.replace("*","number"), sequential);
+      doTestPointFieldSort(r.replace("*","number"), randstrs);
+      // TODO: test some randomly generated (then sorted) arrays (with dups and/or missing values)
+
+      doTestFloatPointFunctionQuery(r.replace("*","number"), "float");
+    }
+    
+    for (String r : Arrays.asList("*_p_f_ni", "*_p_f_ni_ns")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSortError(r.replace("*","number"), "w/o docValues", "42.34");
+
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "w/o docValues", "42.34");
+    }
+    
+    for (String r : Arrays.asList("*_p_f_mv", "*_p_f_ni_mv", "*_p_f_ni_mv_dv", "*_p_f_ni_dv_ns_mv",
+                                  "*_p_f_ni_ns_mv", "*_p_f_dv_ns_mv", "*_p_f_mv_dv")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSortError(r.replace("*","number"), "multivalued", "42.34");
+      doTestPointFieldSortError(r.replace("*","number"), "multivalued", "42.34", "66.6");
+      
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "multivalued", "42.34");
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "multivalued", "42.34", "66.6");
+    }
+    
+    assertEquals("Missing types in the test", Collections.<String>emptySet(), regexToTest);
+
   }
   
   @Test
@@ -390,11 +562,6 @@ public class TestPointFields extends SolrTestCaseJ4 {
   }
 
   @Test
-  public void testFloatPointFunctionQuery() throws Exception {
-    doTestFloatPointFunctionQuery("number_p_f_dv", "number_p_f", "float");
-  }
-  
-  @Test
   public void testFloatPointStats() throws Exception {
     testPointStats("number_p_f", "number_p_f_dv", new String[]{"-10.0", "1.1", "2.2", "3.3", "4.4", "5.5", "6.6", "7.7", "8.8", "9.9"},
         -10D, 9.9D, "10", "1", 1E-6D);
@@ -409,6 +576,12 @@ public class TestPointFields extends SolrTestCaseJ4 {
   }
   
   @Test
+  public void testFloatPointFieldMultiValuedNonSearchableExactQuery() throws Exception {
+    testPointFieldMultiValuedExactQuery("number_p_f_ni_mv", getRandomStringArrayWithFloats(20, false), false);
+    testPointFieldMultiValuedExactQuery("number_p_f_ni_ns_mv", getRandomStringArrayWithFloats(20, false), false);
+  }
+  
+  @Test
   public void testFloatPointFieldMultiValuedReturn() throws Exception {
     testPointFieldMultiValuedReturn("number_p_f_mv", "float", getSequentialStringArrayWithDoubles(20));
     testPointFieldMultiValuedReturn("number_p_f_ni_mv_dv", "float", getSequentialStringArrayWithDoubles(20));
@@ -462,9 +635,15 @@ public class TestPointFields extends SolrTestCaseJ4 {
 
   @Test
   public void testFloatPointSetQuery() throws Exception {
-    doTestSetQueries("number_p_f", getRandomStringArrayWithFloats(10, false), false);
-    doTestSetQueries("number_p_f_mv", getRandomStringArrayWithFloats(10, false), true);
-    doTestSetQueries("number_p_f_ni_dv", getRandomStringArrayWithFloats(10, false), false);
+    doTestSetQueries("number_p_f", getRandomStringArrayWithFloats(20, false), false);
+    doTestSetQueries("number_p_f_mv", getRandomStringArrayWithFloats(20, false), true);
+    doTestSetQueries("number_p_f_ni_dv", getRandomStringArrayWithFloats(20, false), false);
+  }
+  
+  @Test
+  public void testFloatPointFieldNotIndexed() throws Exception {
+    doTestFieldNotIndexed("number_p_f_ni", getSequentialStringArrayWithDoubles(10));
+    doTestFieldNotIndexed("number_p_f_ni_mv", getSequentialStringArrayWithDoubles(10));
   }
   
   // Long
@@ -473,16 +652,23 @@ public class TestPointFields extends SolrTestCaseJ4 {
   public void testLongPointFieldExactQuery() throws Exception {
     doTestIntPointFieldExactQuery("number_p_l", true);
     doTestIntPointFieldExactQuery("number_p_l_mv", true);
+    doTestIntPointFieldExactQuery("number_p_l_dv", true);
+    doTestIntPointFieldExactQuery("number_p_l_mv_dv", true);
     doTestIntPointFieldExactQuery("number_p_l_ni_dv", true);
     doTestIntPointFieldExactQuery("number_p_l_ni_ns_dv", true);
+    doTestIntPointFieldExactQuery("number_p_l_ni_dv_ns", true);
     doTestIntPointFieldExactQuery("number_p_l_ni_mv_dv", true);
   }
   
   @Test
+  public void testLongPointFieldNonSearchableExactQuery() throws Exception {
+    doTestIntPointFieldExactQuery("number_p_l_ni", true, false);
+    doTestIntPointFieldExactQuery("number_p_l_ni_ns", true, false);
+  }
+  
+  @Test
   public void testLongPointFieldReturn() throws Exception {
     testPointFieldReturn("number_p_l", "long", new String[]{"0", "-1", "2", "3", "43", "52", "-60", "74", "80", "99", String.valueOf(Long.MAX_VALUE)});
-    clearIndex();
-    assertU(commit());
     testPointFieldReturn("number_p_l_dv_ns", "long", new String[]{"0", "-1", "2", "3", "43", "52", "-60", "74", "80", "99", String.valueOf(Long.MAX_VALUE)});
   }
   
@@ -494,10 +680,45 @@ public class TestPointFields extends SolrTestCaseJ4 {
   }
   
   @Test
-  public void testLongPointFieldSort() throws Exception {
-    doTestPointFieldSort("number_p_l", "number_p_l_dv", new String[]{String.valueOf(Integer.MIN_VALUE), 
-        "1", "2", "3", "4", "5", "6", "7", 
-        String.valueOf(Integer.MAX_VALUE), String.valueOf(Long.MAX_VALUE)});
+  public void testLongPointFieldNonSearchableRangeQuery() throws Exception {
+    doTestPointFieldNonSearchableRangeQuery("number_p_l_ni", "3333333333");
+    doTestPointFieldNonSearchableRangeQuery("number_p_l_ni_ns", "3333333333");
+    doTestPointFieldNonSearchableRangeQuery("number_p_l_ni_ns_mv", "3333333333", "-4444444444");
+  }
+
+  @Test
+  public void testLongPointFieldSortAndFunction() throws Exception {
+    final SortedSet<String> regexToTest = dynFieldRegexesForType(LongPointField.class);
+    final String[] vals = new String[]{ String.valueOf(Integer.MIN_VALUE), 
+                                        "1", "2", "3", "4", "5", "6", "7", 
+                                        String.valueOf(Integer.MAX_VALUE), String.valueOf(Long.MAX_VALUE)};
+    
+    for (String r : Arrays.asList("*_p_l", "*_p_l_dv", "*_p_l_dv_ns", "*_p_l_ni_dv",
+                                  "*_p_l_ni_dv_ns", "*_p_l_ni_ns_dv")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSort(r.replace("*","number"), vals);
+      // TODO: test some randomly generated (then sorted) arrays (with dups and/or missing values)
+
+      doTestIntPointFunctionQuery(r.replace("*","number"), "long");
+    }
+    
+    for (String r : Arrays.asList("*_p_l_ni", "*_p_l_ni_ns")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSortError(r.replace("*","number"), "w/o docValues", "4234");
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "w/o docValues", "4234");
+    }
+    
+    for (String r : Arrays.asList("*_p_l_mv", "*_p_l_ni_mv", "*_p_l_ni_mv_dv", "*_p_l_ni_dv_ns_mv",
+                                  "*_p_l_ni_ns_mv", "*_p_l_dv_ns_mv", "*_p_l_mv_dv")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSortError(r.replace("*","number"), "multivalued", "4234");
+      doTestPointFieldSortError(r.replace("*","number"), "multivalued", "4234", "66666666");
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "multivalued", "4234");
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "multivalued", "4234", "66666666");
+    }
+    
+    assertEquals("Missing types in the test", Collections.<String>emptySet(), regexToTest);
+
   }
   
   @Test
@@ -514,11 +735,6 @@ public class TestPointFields extends SolrTestCaseJ4 {
   }
   
   @Test
-  public void testLongPointFunctionQuery() throws Exception {
-    doTestIntPointFunctionQuery("number_p_l_dv", "number_p_l", "long");
-  }
-  
-  @Test
   public void testLongPointStats() throws Exception {
     testPointStats("number_p_l", "number_p_l_dv", new String[]{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"},
         0D, 9D, "10", "1", 0D);
@@ -533,6 +749,12 @@ public class TestPointFields extends SolrTestCaseJ4 {
   }
   
   @Test
+  public void testLongPointFieldMultiValuedNonSearchableExactQuery() throws Exception {
+    testPointFieldMultiValuedExactQuery("number_p_l_ni_mv", getSequentialStringArrayWithInts(20), false);
+    testPointFieldMultiValuedExactQuery("number_p_l_ni_ns_mv", getSequentialStringArrayWithInts(20), false);
+  }
+  
+  @Test
   public void testLongPointFieldMultiValuedReturn() throws Exception {
     testPointFieldMultiValuedReturn("number_p_l_mv", "long", getSequentialStringArrayWithInts(20));
     testPointFieldMultiValuedReturn("number_p_l_ni_mv_dv", "long", getSequentialStringArrayWithInts(20));
@@ -584,9 +806,15 @@ public class TestPointFields extends SolrTestCaseJ4 {
   
   @Test
   public void testLongPointSetQuery() throws Exception {
-    doTestSetQueries("number_p_l", getRandomStringArrayWithLongs(10, false), false);
-    doTestSetQueries("number_p_l_mv", getRandomStringArrayWithLongs(10, false), true);
-    doTestSetQueries("number_p_l_ni_dv", getRandomStringArrayWithLongs(10, false), false);
+    doTestSetQueries("number_p_l", getRandomStringArrayWithLongs(20, false), false);
+    doTestSetQueries("number_p_l_mv", getRandomStringArrayWithLongs(20, false), true);
+    doTestSetQueries("number_p_l_ni_dv", getRandomStringArrayWithLongs(20, false), false);
+  }
+  
+  @Test
+  public void testLongPointFieldNotIndexed() throws Exception {
+    doTestFieldNotIndexed("number_p_l_ni", getSequentialStringArrayWithInts(10));
+    doTestFieldNotIndexed("number_p_l_ni_mv", getSequentialStringArrayWithInts(10));
   }
 
   // Date
@@ -595,18 +823,24 @@ public class TestPointFields extends SolrTestCaseJ4 {
   public void testDatePointFieldExactQuery() throws Exception {
     doTestDatePointFieldExactQuery("number_p_dt", "1995-12-31T23:59:59Z");
     doTestDatePointFieldExactQuery("number_p_dt_mv", "2015-12-31T23:59:59Z-1DAY");
+    doTestDatePointFieldExactQuery("number_p_dt_dv", "2000-12-31T23:59:59Z+3DAYS");
+    doTestDatePointFieldExactQuery("number_p_dt_mv_dv", "2000-12-31T23:59:59Z+3DAYS");
     doTestDatePointFieldExactQuery("number_p_dt_ni_dv", "2000-12-31T23:59:59Z+3DAYS");
     doTestDatePointFieldExactQuery("number_p_dt_ni_ns_dv", "1995-12-31T23:59:59Z-1MONTH");
     doTestDatePointFieldExactQuery("number_p_dt_ni_mv_dv", "1995-12-31T23:59:59Z+2MONTHS");
   }
+  @Test
+  public void testDatePointFieldNonSearchableExactQuery() throws Exception {
+    doTestDatePointFieldExactQuery("number_p_dt_ni", "1995-12-31T23:59:59Z", false);
+    doTestDatePointFieldExactQuery("number_p_dt_ni_ns", "1995-12-31T23:59:59Z", false);
+
+  }
 
   @Test
   public void testDatePointFieldReturn() throws Exception {
     testPointFieldReturn("number_p_dt", "date",
         new String[]{"1995-12-31T23:59:59Z", "1994-02-28T23:59:59Z",
             "2015-12-31T23:59:59Z", "2000-10-31T23:59:59Z", "1999-12-31T12:59:59Z"});
-    clearIndex();
-    assertU(commit());
     testPointFieldReturn("number_p_dt_dv_ns", "date",
         new String[]{"1995-12-31T23:59:59Z", "1994-02-28T23:59:59Z",
             "2015-12-31T23:59:59Z", "2000-10-31T23:59:59Z", "1999-12-31T12:59:59Z"});
@@ -617,10 +851,48 @@ public class TestPointFields extends SolrTestCaseJ4 {
     doTestDatePointFieldRangeQuery("number_p_dt");
     doTestDatePointFieldRangeQuery("number_p_dt_ni_ns_dv");
   }
+  
+  @Test
+  public void testDatePointFieldNonSearchableRangeQuery() throws Exception {
+    doTestPointFieldNonSearchableRangeQuery("number_p_dt_ni", "1995-12-31T23:59:59Z");
+    doTestPointFieldNonSearchableRangeQuery("number_p_dt_ni_ns", "1995-12-31T23:59:59Z");
+    doTestPointFieldNonSearchableRangeQuery("number_p_dt_ni_ns_mv", "1995-12-31T23:59:59Z", "2000-10-31T23:59:59Z");
+  }
 
   @Test
-  public void testDatePointFieldSort() throws Exception {
-    doTestPointFieldSort("number_p_dt", "number_p_dt_dv", getSequentialStringArrayWithDates(10));
+  public void testDatePointFieldSortAndFunction() throws Exception {
+    final SortedSet<String> regexToTest = dynFieldRegexesForType(DatePointField.class);
+    final String[] sequential = getSequentialStringArrayWithDates(10);
+    
+    for (String r : Arrays.asList("*_p_dt", "*_p_dt_dv", "*_p_dt_dv_ns", "*_p_dt_ni_dv",
+                                  "*_p_dt_ni_dv_ns", "*_p_dt_ni_ns_dv")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSort(r.replace("*","number"), sequential);
+      // TODO: test some randomly generated (then sorted) arrays (with dups and/or missing values)
+
+      doTestDatePointFunctionQuery(r.replace("*","number"), "date");
+    }
+    
+    for (String r : Arrays.asList("*_p_dt_ni", "*_p_dt_ni_ns")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSortError(r.replace("*","number"), "w/o docValues", "1995-12-31T23:59:59Z");
+      
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "w/o docValues", "1995-12-31T23:59:59Z");
+    }
+    
+    for (String r : Arrays.asList("*_p_dt_mv", "*_p_dt_ni_mv", "*_p_dt_ni_mv_dv", "*_p_dt_ni_dv_ns_mv",
+                                  "*_p_dt_ni_ns_mv", "*_p_dt_dv_ns_mv", "*_p_dt_mv_dv")) {
+      assertTrue(r, regexToTest.remove(r));
+      doTestPointFieldSortError(r.replace("*","number"), "multivalued", "1995-12-31T23:59:59Z");
+      doTestPointFieldSortError(r.replace("*","number"), "multivalued", "1995-12-31T23:59:59Z", "2000-12-31T23:59:59Z");
+      
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "multivalued", "1995-12-31T23:59:59Z");
+      doTestPointFieldFunctionQueryError(r.replace("*","number"), "multivalued", "1995-12-31T23:59:59Z", "2000-12-31T23:59:59Z");
+                                
+    }
+    
+    assertEquals("Missing types in the test", Collections.<String>emptySet(), regexToTest);
+
   }
 
   @Test
@@ -637,11 +909,6 @@ public class TestPointFields extends SolrTestCaseJ4 {
   }
 
   @Test
-  public void testDatePointFunctionQuery() throws Exception {
-    doTestDatePointFunctionQuery("number_p_dt_dv", "number_p_dt", "date");
-  }
-
-  @Test
   public void testDatePointStats() throws Exception {
     testDatePointStats("number_p_dt", "number_p_dt_dv", getSequentialStringArrayWithDates(10));
     testDatePointStats("number_p_dt_mv", "number_p_dt_mv_dv", getSequentialStringArrayWithDates(10));
@@ -654,6 +921,12 @@ public class TestPointFields extends SolrTestCaseJ4 {
   }
 
   @Test
+  public void testDatePointFieldMultiValuedNonSearchableExactQuery() throws Exception {
+    testPointFieldMultiValuedExactQuery("number_p_dt_ni_mv", getSequentialStringArrayWithDates(20), false);
+    testPointFieldMultiValuedExactQuery("number_p_dt_ni_ns_mv", getSequentialStringArrayWithDates(20), false);
+  }
+  
+  @Test
   public void testDatePointFieldMultiValuedReturn() throws Exception {
     testPointFieldMultiValuedReturn("number_p_dt_mv", "date", getSequentialStringArrayWithDates(20));
     testPointFieldMultiValuedReturn("number_p_dt_ni_mv_dv", "date", getSequentialStringArrayWithDates(20));
@@ -704,9 +977,16 @@ public class TestPointFields extends SolrTestCaseJ4 {
 
   @Test
   public void testDatePointSetQuery() throws Exception {
-    doTestSetQueries("number_p_dt", getRandomStringArrayWithDates(10, false), false);
-    doTestSetQueries("number_p_dt_mv", getRandomStringArrayWithDates(10, false), true);
-    doTestSetQueries("number_p_dt_ni_dv", getRandomStringArrayWithDates(10, false), false);
+    doTestSetQueries("number_p_dt", getRandomStringArrayWithDates(20, false), false);
+    doTestSetQueries("number_p_dt_mv", getRandomStringArrayWithDates(20, false), true);
+    doTestSetQueries("number_p_dt_ni_dv", getRandomStringArrayWithDates(20, false), false);
+  }
+  
+  
+  @Test
+  public void testDatePointFieldNotIndexed() throws Exception {
+    doTestFieldNotIndexed("number_p_dt_ni", getSequentialStringArrayWithDates(10));
+    doTestFieldNotIndexed("number_p_dt_ni_mv", getSequentialStringArrayWithDates(10));
   }
   
   @Test
@@ -730,7 +1010,42 @@ public class TestPointFields extends SolrTestCaseJ4 {
     }
   }
   
+  public void testInternals() throws IOException {
+    String[] types = new String[]{"i", "l", "f", "d"};
+    String[] suffixes = new String[]{"", "_dv", "_mv", "_mv_dv", "_ni", "_ni_dv", "_ni_dv_ns", "_ni_dv_ns_mv", "_ni_mv", "_ni_mv_dv", "_ni_ns", "_ni_ns_mv", "_dv_ns", "_ni_ns_dv", "_dv_ns_mv"};
+    Set<String> typesTested = new HashSet<>();
+    for (String type:types) {
+      for (String suffix:suffixes) {
+        doTestInternals("number_p_" + type + suffix, getSequentialStringArrayWithInts(10));
+        typesTested.add("*_p_" + type + suffix);
+      }
+    }
+    for (String suffix:suffixes) {
+      doTestInternals("number_p_dt" + suffix, getSequentialStringArrayWithDates(10));
+      typesTested.add("*_p_dt" + suffix);
+    }
+
+    assertEquals("Missing types in the test", dynFieldRegexesForType(PointField.class), typesTested);
+  }
+  
   // Helper methods
+
+  /**
+   * Given a FieldType, return the list of DynamicField 'regexes' for all declared 
+   * DynamicFields that use that FieldType.
+   *
+   * @see IndexSchema#getDynamicFields
+   * @see DynamicField#getRegex
+   */
+  private static SortedSet<String> dynFieldRegexesForType(final Class<? extends FieldType> clazz) {
+    SortedSet<String> typesToTest = new TreeSet<>();
+    for (DynamicField dynField : h.getCore().getLatestSchema().getDynamicFields()) {
+      if (clazz.isInstance(dynField.getPrototype().getType())) {
+        typesToTest.add(dynField.getRegex());
+      }
+    }
+    return typesToTest;
+  }
   
   private String[] getRandomStringArrayWithDoubles(int length, boolean sorted) {
     Set<Double> set;
@@ -870,24 +1185,61 @@ public class TestPointFields extends SolrTestCaseJ4 {
     return stringArr;
   }
   
-  private void doTestIntPointFieldExactQuery(String field, boolean testLong) throws Exception {
+  private void doTestFieldNotIndexed(String field, String[] values) throws IOException {
+    assert values.length == 10;
+    // test preconditions
+    SchemaField sf = h.getCore().getLatestSchema().getField(field);
+    assertFalse("Field should be indexed=false", sf.indexed());
+    assertFalse("Field should be docValues=false", sf.hasDocValues());
+    
+    for (int i=0; i < 10; i++) {
+      assertU(adoc("id", String.valueOf(i), field, values[i]));
+    }
+    assertU(commit());
+    assertQ(req("q", "*:*"), "//*[@numFound='10']");
+    assertQ("Can't search on index=false docValues=false field", req("q", field + ":[* TO *]"), "//*[@numFound='0']");
+    IndexReader ir;
+    RefCounted<SolrIndexSearcher> ref = null;
+    try {
+      ref = h.getCore().getSearcher();
+      ir = ref.get().getIndexReader();
+      assertEquals("Field " + field + " should have no point values", 0, PointValues.size(ir, field));
+    } finally {
+      ref.decref();
+    }
+  }
+  
+   
+  private void doTestIntPointFieldExactQuery(final String field, final boolean testLong) throws Exception {
+    doTestIntPointFieldExactQuery(field, testLong, true);
+  }
+
+  /**
+   * @param field the field to use for indexing and searching against
+   * @param testLong set to true if "field" is expected to support long values, false if only integers
+   * @param searchable set to true if searches against "field" should succeed, false if field is only stored and searches should always get numFound=0
+   */
+  private void doTestIntPointFieldExactQuery(final String field, final boolean testLong, final boolean searchable) throws Exception {
+    final String MATCH_ONE = "//*[@numFound='" + (searchable ? "1" : "0") + "']";
+    final String MATCH_TWO = "//*[@numFound='" + (searchable ? "2" : "0") + "']";
+
     for (int i=0; i < 10; i++) {
       assertU(adoc("id", String.valueOf(i), field, String.valueOf(i+1)));
     }
     assertU(commit());
     for (int i = 0; i < 10; i++) {
       assertQ(req("q", field + ":"+(i+1), "fl", "id, " + field), 
-          "//*[@numFound='1']");
+          MATCH_ONE);
     }
     
     for (int i = 0; i < 10; i++) {
-      assertQ(req("q", field + ":" + (i+1) + " OR " + field + ":" + ((i+1)%10 + 1)), "//*[@numFound='2']");
+      assertQ(req("debug", "true", "q", field + ":" + (i+1) + " OR " + field + ":" + ((i+1)%10 + 1)), MATCH_TWO);
     }
     
     assertU(adoc("id", String.valueOf(Integer.MAX_VALUE), field, String.valueOf(Integer.MAX_VALUE)));
     assertU(commit());
     assertQ(req("q", field + ":"+Integer.MAX_VALUE, "fl", "id, " + field), 
-        "//*[@numFound='1']");
+        MATCH_ONE);
     
     if (testLong) {
       for (long i = (long)Integer.MAX_VALUE; i < (long)Integer.MAX_VALUE + 10; i++) {
@@ -896,12 +1248,12 @@ public class TestPointFields extends SolrTestCaseJ4 {
       assertU(commit());
       for (long i = (long)Integer.MAX_VALUE; i < (long)Integer.MAX_VALUE + 10; i++) {
         assertQ(req("q", field + ":"+(i+1), "fl", "id, " + field), 
-            "//*[@numFound='1']");
+                MATCH_ONE);
       }
       assertU(adoc("id", String.valueOf(Long.MAX_VALUE), field, String.valueOf(Long.MAX_VALUE)));
       assertU(commit());
       assertQ(req("q", field + ":"+Long.MAX_VALUE, "fl", "id, " + field), 
-          "//*[@numFound='1']");
+              MATCH_ONE);
     }
     
     clearIndex();
@@ -925,8 +1277,8 @@ public class TestPointFields extends SolrTestCaseJ4 {
     assertU(commit());
     String[] expected = new String[values.length + 1];
     expected[0] = "//*[@numFound='" + values.length + "']"; 
-    for (int i = 1; i <= values.length; i++) {
-      expected[i] = "//result/doc[" + i + "]/" + type + "[@name='" + field + "'][.='" + values[i-1] + "']";
+    for (int i = 0; i < values.length; i++) {
+      expected[i + 1] = "//result/doc[str[@name='id']='" + i + "']/" + type + "[@name='" + field + "'][.='" + values[i] + "']";
     }
     assertQ(req("q", "*:*", "fl", "id, " + field, "rows", String.valueOf(values.length)), expected);
 
@@ -937,6 +1289,21 @@ public class TestPointFields extends SolrTestCaseJ4 {
             "//doc/" + type + "[@name='" + field + "'][.='" + values[i] + "']");
       }
     }
+    clearIndex();
+    assertU(commit());
+  }
+
+  private void doTestPointFieldNonSearchableRangeQuery(String fieldName, String... values) throws Exception {
+    for (int i = 9; i >= 0; i--) {
+      SolrInputDocument doc = sdoc("id", String.valueOf(i));
+      for (String value : values) {
+        doc.addField(fieldName, value);
+      }
+      assertU(adoc(doc));
+    }
+    assertU(commit());
+    assertQ(req("q", fieldName + ":[* TO *]", "fl", "id, " + fieldName, "sort", "id asc"), 
+            "//*[@numFound='0']");
   }
 
   private void doTestIntPointFieldRangeQuery(String fieldName, String type, boolean testLong) throws Exception {
@@ -1119,43 +1486,69 @@ public class TestPointFields extends SolrTestCaseJ4 {
         "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='8'][.='2']",
         "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='-10'][.='0']");
   }
-  
-  private void doTestIntPointFunctionQuery(String dvFieldName, String nonDvFieldName, String type) throws Exception {
+
+  private void doTestIntPointFunctionQuery(String field, String type) throws Exception {
     for (int i = 9; i >= 0; i--) {
-      assertU(adoc("id", String.valueOf(i), dvFieldName, String.valueOf(i), nonDvFieldName, String.valueOf(i)));
+      assertU(adoc("id", String.valueOf(i), field, String.valueOf(i)));
     }
     assertU(commit());
-    assertTrue(h.getCore().getLatestSchema().getField(dvFieldName).hasDocValues());
-    assertTrue(h.getCore().getLatestSchema().getField(dvFieldName).getType() instanceof PointField);
-    assertQ(req("q", "*:*", "fl", "id, " + dvFieldName, "sort", "product(-1," + dvFieldName + ") asc"), 
-        "//*[@numFound='10']",
-        "//result/doc[1]/" + type + "[@name='" + dvFieldName + "'][.='9']",
-        "//result/doc[2]/" + type + "[@name='" + dvFieldName + "'][.='8']",
-        "//result/doc[3]/" + type + "[@name='" + dvFieldName + "'][.='7']",
-        "//result/doc[10]/" + type + "[@name='" + dvFieldName + "'][.='0']");
+    assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof PointField);
     
-    assertQ(req("q", "*:*", "fl", "id, " + dvFieldName + ", product(-1," + dvFieldName + ")", "sort", "id asc"), 
+    assertQ(req("q", "*:*", "fl", "id, " + field, "sort", "product(-1," + field + ") asc"), 
         "//*[@numFound='10']",
-        "//result/doc[1]/float[@name='product(-1," + dvFieldName + ")'][.='-0.0']",
-        "//result/doc[2]/float[@name='product(-1," + dvFieldName + ")'][.='-1.0']",
-        "//result/doc[3]/float[@name='product(-1," + dvFieldName + ")'][.='-2.0']",
-        "//result/doc[10]/float[@name='product(-1," + dvFieldName + ")'][.='-9.0']");
+        "//result/doc[1]/" + type + "[@name='" + field + "'][.='9']",
+        "//result/doc[2]/" + type + "[@name='" + field + "'][.='8']",
+        "//result/doc[3]/" + type + "[@name='" + field + "'][.='7']",
+        "//result/doc[10]/" + type + "[@name='" + field + "'][.='0']");
     
-    assertQ(req("q", "*:*", "fl", "id, " + dvFieldName + ", field(" + dvFieldName + ")", "sort", "id asc"), 
+    assertQ(req("q", "*:*", "fl", "id, " + field + ", product(-1," + field + ")", "sort", "id asc"), 
         "//*[@numFound='10']",
-        "//result/doc[1]/" + type + "[@name='field(" + dvFieldName + ")'][.='0']",
-        "//result/doc[2]/" + type + "[@name='field(" + dvFieldName + ")'][.='1']",
-        "//result/doc[3]/" + type + "[@name='field(" + dvFieldName + ")'][.='2']",
-        "//result/doc[10]/" + type + "[@name='field(" + dvFieldName + ")'][.='9']");
+        "//result/doc[1]/float[@name='product(-1," + field + ")'][.='-0.0']",
+        "//result/doc[2]/float[@name='product(-1," + field + ")'][.='-1.0']",
+        "//result/doc[3]/float[@name='product(-1," + field + ")'][.='-2.0']",
+        "//result/doc[10]/float[@name='product(-1," + field + ")'][.='-9.0']");
     
-    assertFalse(h.getCore().getLatestSchema().getField(nonDvFieldName).hasDocValues());
-    assertTrue(h.getCore().getLatestSchema().getField(nonDvFieldName).getType() instanceof PointField);
+    assertQ(req("q", "*:*", "fl", "id, " + field + ", field(" + field + ")", "sort", "id asc"), 
+        "//*[@numFound='10']",
+        "//result/doc[1]/" + type + "[@name='field(" + field + ")'][.='0']",
+        "//result/doc[2]/" + type + "[@name='field(" + field + ")'][.='1']",
+        "//result/doc[3]/" + type + "[@name='field(" + field + ")'][.='2']",
+        "//result/doc[10]/" + type + "[@name='field(" + field + ")'][.='9']");
+    
+  }
+
+  /** 
+   * Checks that the specified field cannot be used as a value source, even if there are documents 
+   * with (all) the specified values in the index.
+   *
+   * @param field the field name to try to use as a value source
+   * @param errSubStr substring to look for in the error msg
+   * @param values one or more values to put into the doc(s) in the index - may be more than one for multivalued fields
+   */
+  private void doTestPointFieldFunctionQueryError(String field, String errSubStr, String...values) throws Exception {
+    final int numDocs = atLeast(random(), 10);
+    for (int i = 0; i < numDocs; i++) {
+      SolrInputDocument doc = sdoc("id", String.valueOf(i));
+      for (String v: values) {
+        doc.addField(field, v);
+      }
+      assertU(adoc(doc));
+    }
 
-    assertQEx("Expecting Exception", 
-        "sort param could not be parsed as a query", 
-        req("q", "*:*", "fl", "id, " + nonDvFieldName, "sort", "product(-1," + nonDvFieldName + ") asc"), 
-        SolrException.ErrorCode.BAD_REQUEST);
+    assertQEx("Should not be able to use field in function: " + field, errSubStr,
+              req("q", "*:*", "fl", "id", "fq", "{!frange l=0 h=100}product(-1, " + field + ")"), 
+              SolrException.ErrorCode.BAD_REQUEST);
+    
+    clearIndex();
+    assertU(commit());
+    
+    // empty index should (also) give same error
+    assertQEx("Should not be able to use field in function: " + field, errSubStr,
+              req("q", "*:*", "fl", "id", "fq", "{!frange l=0 h=100}product(-1, " + field + ")"), 
+              SolrException.ErrorCode.BAD_REQUEST);
+    
   }
+
   
   private void testPointStats(String field, String dvField, String[] numbers, double min, double max, String count, String missing, double delta) {
     String minMin = String.valueOf(min - Math.abs(delta*min));
@@ -1185,8 +1578,23 @@ public class TestPointFields extends SolrTestCaseJ4 {
         req("q", "*:*", "fl", "id, " + field, "stats", "true", "stats.field", field), 
         SolrException.ErrorCode.BAD_REQUEST);
   }
-  
-  private void testPointFieldMultiValuedExactQuery(String fieldName, String[] numbers) throws Exception {
+
+
+  private void testPointFieldMultiValuedExactQuery(final String fieldName, final String[] numbers) throws Exception {
+    testPointFieldMultiValuedExactQuery(fieldName, numbers, true);
+  }
+
+  /**
+   * @param fieldName the field to use for indexing and searching against
+   * @param numbers list of 20 values to index in 10 docs (pairwise)
+   * @param searchable set to true if searches against "field" should succeed, false if field is only stored and searches should always get numFound=0
+   */
+  private void testPointFieldMultiValuedExactQuery(final String fieldName, final String[] numbers,
+                                                   final boolean searchable) throws Exception {
+    
+    final String MATCH_ONE = "//*[@numFound='" + (searchable ? "1" : "0") + "']";
+    final String MATCH_TWO = "//*[@numFound='" + (searchable ? "2" : "0") + "']";
+    
     assert numbers != null && numbers.length == 20;
     assertTrue(h.getCore().getLatestSchema().getField(fieldName).multiValued());
     assertTrue(h.getCore().getLatestSchema().getField(fieldName).getType() instanceof PointField);
@@ -1197,18 +1605,20 @@ public class TestPointFields extends SolrTestCaseJ4 {
     for (int i = 0; i < 20; i++) {
       if (h.getCore().getLatestSchema().getField(fieldName).getType() instanceof DatePointField) {
         assertQ(req("q", fieldName + ":\"" + numbers[i] + "\""),
-            "//*[@numFound='1']");
+                MATCH_ONE);
       } else {
         assertQ(req("q", fieldName + ":" + numbers[i].replace("-", "\\-")),
-            "//*[@numFound='1']");
+                MATCH_ONE);
       }
     }
     
     for (int i = 0; i < 20; i++) {
       if (h.getCore().getLatestSchema().getField(fieldName).getType() instanceof DatePointField) {
-        assertQ(req("q", fieldName + ":\"" + numbers[i] + "\"" + " OR " + fieldName + ":\"" + numbers[(i+1)%10]+"\""), "//*[@numFound='2']");
+        assertQ(req("q", fieldName + ":\"" + numbers[i] + "\"" + " OR " + fieldName + ":\"" + numbers[(i+1)%10]+"\""),
+                MATCH_TWO);
       } else {
-        assertQ(req("q", fieldName + ":" + numbers[i].replace("-", "\\-") + " OR " + fieldName + ":" + numbers[(i+1)%10].replace("-", "\\-")), "//*[@numFound='2']");
+        assertQ(req("q", fieldName + ":" + numbers[i].replace("-", "\\-") + " OR " + fieldName + ":" + numbers[(i+1)%10].replace("-", "\\-")),
+                MATCH_TWO);
       }
     }
   }
@@ -1248,7 +1658,7 @@ public class TestPointFields extends SolrTestCaseJ4 {
     }
     assertQ(req("q", "*:*", "fl", "id, " + fieldName, "sort","id asc"), expected);
   }
-  
+
   private void testPointFieldMultiValuedRangeQuery(String fieldName, String type, String[] numbers) throws Exception {
     assert numbers != null && numbers.length == 20;
     assertTrue(h.getCore().getLatestSchema().getField(fieldName).multiValued());
@@ -1605,20 +2015,32 @@ public class TestPointFields extends SolrTestCaseJ4 {
         "//result/doc[1]/" + type + "[@name='" + field + "'][.='3']");
   }
 
-  private void doTestFloatPointFieldExactQuery(String field) throws Exception {
+  
+  private void doTestFloatPointFieldExactQuery(final String field) throws Exception {
+    doTestFloatPointFieldExactQuery(field, true);
+  }
+  /**
+   * @param field the field to use for indexing and searching against
+   * @param searchable set to true if searches against "field" should succeed, false if field is only stored and searches should always get numFound=0
+   */
+  private void doTestFloatPointFieldExactQuery(String field, final boolean searchable) throws Exception {
+    final String MATCH_ONE = "//*[@numFound='" + (searchable ? "1" : "0") + "']";
+    final String MATCH_TWO = "//*[@numFound='" + (searchable ? "2" : "0") + "']";
+    
     for (int i=0; i < 10; i++) {
       assertU(adoc("id", String.valueOf(i), field, String.valueOf(i + "." + i)));
     }
     assertU(commit());
     for (int i = 0; i < 9; i++) {
       assertQ(req("q", field + ":"+(i+1) + "." + (i+1), "fl", "id, " + field), 
-          "//*[@numFound='1']");
+              MATCH_ONE);
     }
     
     for (int i = 0; i < 9; i++) {
       String num1 = (i+1) + "." + (i+1);
       String num2 = ((i+1)%9 + 1) + "." + ((i+1)%9 + 1);
-      assertQ(req("q", field + ":" + num1 + " OR " + field + ":" + num2), "//*[@numFound='2']");
+      assertQ(req("q", field + ":" + num1 + " OR " + field + ":" + num2),
+              MATCH_TWO);
     }
     
     clearIndex();
@@ -1628,35 +2050,91 @@ public class TestPointFields extends SolrTestCaseJ4 {
       assertU(adoc("id", "random_number ", field, String.valueOf(rand))); //always the same id to override
       assertU(commit());
       assertQ(req("q", field + ":" + rand, "fl", "id, " + field), 
-          "//*[@numFound='1']");
+              MATCH_ONE);
     }
     clearIndex();
     assertU(commit());
   }
-  
-  private void doTestPointFieldSort(String field, String dvField, String[] arr) throws Exception {
-    assert arr != null && arr.length == 10;
-    for (int i = arr.length-1; i >= 0; i--) {
-      assertU(adoc("id", String.valueOf(i), dvField, String.valueOf(arr[i]), field, String.valueOf(arr[i])));
+
+  /**
+   * For each value, creates a doc with that value in the specified field and then asserts that
+   * asc/desc sorts on that field succeed and that the docs are in the (relatively) expected order
+   *
+   * @param field name of field to sort on
+   * @param values list of values in ascending order
+   */
+  private void doTestPointFieldSort(String field, String... values) throws Exception {
+    assert values != null && 2 <= values.length;
+
+    // TODO: need to add sort missing coverage...
+    //
+    // idea: accept "null" as possible value for sort missing tests ?
+    //
+    // need to account for possibility that multiple nulls will be in non deterministic order
+    // always using secondary sort on id seems prudent ... handles any "dups" in values[]
+    
+    final List<SolrInputDocument> docs = new ArrayList<>(values.length);
+    final String[] ascXpathChecks = new String[values.length + 1];
+    final String[] descXpathChecks = new String[values.length + 1];
+    ascXpathChecks[values.length] = "//*[@numFound='" + values.length + "']";
+    descXpathChecks[values.length] = "//*[@numFound='" + values.length + "']";
+    
+    for (int i = values.length-1; i >= 0; i--) {
+      docs.add(sdoc("id", String.valueOf(i), field, String.valueOf(values[i])));
+      // reminder: xpath array indexes start at 1
+      ascXpathChecks[i]= "//result/doc["+ (1 + i)+"]/str[@name='id'][.='"+i+"']";
+      descXpathChecks[i]= "//result/doc["+ (values.length - i) +"]/str[@name='id'][.='"+i+"']";
+    }
+    
+    // ensure doc add order doesn't affect results
+    Collections.shuffle(docs, random());
+    for (SolrInputDocument doc : docs) {
+      assertU(adoc(doc));
     }
     assertU(commit());
-    assertTrue(h.getCore().getLatestSchema().getField(dvField).hasDocValues());
-    assertTrue(h.getCore().getLatestSchema().getField(dvField).getType() instanceof PointField);
-    assertQ(req("q", "*:*", "fl", "id", "sort", dvField + " desc"), 
-        "//*[@numFound='10']",
-        "//result/doc[1]/str[@name='id'][.='9']",
-        "//result/doc[2]/str[@name='id'][.='8']",
-        "//result/doc[3]/str[@name='id'][.='7']",
-        "//result/doc[10]/str[@name='id'][.='0']");
+
+    assertQ(req("q", "*:*", "fl", "id", "sort", field + " asc"), 
+            ascXpathChecks);
+    assertQ(req("q", "*:*", "fl", "id", "sort", field + " desc"), 
+            descXpathChecks);
+
+        
+    clearIndex();
+    assertU(commit());
+  }
+
+
+  /** 
+   * Checks that the specified field cannot be sorted on, even if there are documents 
+   * with (all) the specified values in the index.
+   *
+   * @param field the field name to try to sort on
+   * @param errSubStr substring to look for in the error msg
+   * @param values one or more values to put into the doc(s) in the index - may be more than one for multivalued fields
+   */
+  private void doTestPointFieldSortError(String field, String errSubStr, String... values) throws Exception {
+
+    final int numDocs = atLeast(random(), 10);
+    for (int i = 0; i < numDocs; i++) {
+      SolrInputDocument doc = sdoc("id", String.valueOf(i));
+      for (String v: values) {
+        doc.addField(field, v);
+      }
+      assertU(adoc(doc));
+    }
+
+    assertQEx("Should not be able to sort on field: " + field, errSubStr,
+              req("q", "*:*", "fl", "id", "sort", field + " desc"), 
+              SolrException.ErrorCode.BAD_REQUEST);
     
-    assertFalse(h.getCore().getLatestSchema().getField(field).hasDocValues());
-    assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof PointField);
-    assertQEx("Expecting Exception", 
-        "can not sort on a PointField without doc values: " + field, 
-        req("q", "*:*", "fl", "id", "sort", field + " desc"), 
-        SolrException.ErrorCode.BAD_REQUEST);
+    clearIndex();
+    assertU(commit());
+    
+    // empty index should (also) give same error
+    assertQEx("Should not be able to sort on field: " + field, errSubStr,
+              req("q", "*:*", "fl", "id", "sort", field + " desc"), 
+              SolrException.ErrorCode.BAD_REQUEST);
     
-    //TODO: sort missing
   }
   
   private void doTestFloatPointFieldRangeQuery(String fieldName, String type, boolean testDouble) throws Exception {
@@ -1786,42 +2264,33 @@ public class TestPointFields extends SolrTestCaseJ4 {
         "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='8.0'][.='2']",
         "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='-10.0'][.='0']");
   }
-  
-  private void doTestFloatPointFunctionQuery(String dvFieldName, String nonDvFieldName, String type) throws Exception {
+
+  private void doTestFloatPointFunctionQuery(String field, String type) throws Exception {
     for (int i = 9; i >= 0; i--) {
-      assertU(adoc("id", String.valueOf(i), dvFieldName, String.format(Locale.ROOT, "%f", (float)i*1.1), nonDvFieldName, String.format(Locale.ROOT, "%f", (float)i*1.1)));
+      assertU(adoc("id", String.valueOf(i), field, String.format(Locale.ROOT, "%f", (float)i*1.1)));
     }
     assertU(commit());
-    assertTrue(h.getCore().getLatestSchema().getField(dvFieldName).hasDocValues());
-    assertTrue(h.getCore().getLatestSchema().getField(dvFieldName).getType() instanceof PointField);
-    assertQ(req("q", "*:*", "fl", "id, " + dvFieldName, "sort", "product(-1," + dvFieldName + ") asc"), 
+    assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof PointField);
+    assertQ(req("q", "*:*", "fl", "id, " + field, "sort", "product(-1," + field + ") asc"), 
         "//*[@numFound='10']",
-        "//result/doc[1]/" + type + "[@name='" + dvFieldName + "'][.='9.9']",
-        "//result/doc[2]/" + type + "[@name='" + dvFieldName + "'][.='8.8']",
-        "//result/doc[3]/" + type + "[@name='" + dvFieldName + "'][.='7.7']",
-        "//result/doc[10]/" + type + "[@name='" + dvFieldName + "'][.='0.0']");
+        "//result/doc[1]/" + type + "[@name='" + field + "'][.='9.9']",
+        "//result/doc[2]/" + type + "[@name='" + field + "'][.='8.8']",
+        "//result/doc[3]/" + type + "[@name='" + field + "'][.='7.7']",
+        "//result/doc[10]/" + type + "[@name='" + field + "'][.='0.0']");
     
-    assertQ(req("q", "*:*", "fl", "id, " + dvFieldName + ", product(-1," + dvFieldName + ")", "sort", "id asc"), 
+    assertQ(req("q", "*:*", "fl", "id, " + field + ", product(-1," + field + ")", "sort", "id asc"), 
         "//*[@numFound='10']",
-        "//result/doc[1]/float[@name='product(-1," + dvFieldName + ")'][.='-0.0']",
-        "//result/doc[2]/float[@name='product(-1," + dvFieldName + ")'][.='-1.1']",
-        "//result/doc[3]/float[@name='product(-1," + dvFieldName + ")'][.='-2.2']",
-        "//result/doc[10]/float[@name='product(-1," + dvFieldName + ")'][.='-9.9']");
+        "//result/doc[1]/float[@name='product(-1," + field + ")'][.='-0.0']",
+        "//result/doc[2]/float[@name='product(-1," + field + ")'][.='-1.1']",
+        "//result/doc[3]/float[@name='product(-1," + field + ")'][.='-2.2']",
+        "//result/doc[10]/float[@name='product(-1," + field + ")'][.='-9.9']");
     
-    assertQ(req("q", "*:*", "fl", "id, " + dvFieldName + ", field(" + dvFieldName + ")", "sort", "id asc"), 
+    assertQ(req("q", "*:*", "fl", "id, " + field + ", field(" + field + ")", "sort", "id asc"), 
         "//*[@numFound='10']",
-        "//result/doc[1]/" + type + "[@name='field(" + dvFieldName + ")'][.='0.0']",
-        "//result/doc[2]/" + type + "[@name='field(" + dvFieldName + ")'][.='1.1']",
-        "//result/doc[3]/" + type + "[@name='field(" + dvFieldName + ")'][.='2.2']",
-        "//result/doc[10]/" + type + "[@name='field(" + dvFieldName + ")'][.='9.9']");
-    
-    assertFalse(h.getCore().getLatestSchema().getField(nonDvFieldName).hasDocValues());
-    assertTrue(h.getCore().getLatestSchema().getField(nonDvFieldName).getType() instanceof PointField);
-
-    assertQEx("Expecting Exception", 
-        "sort param could not be parsed as a query", 
-        req("q", "*:*", "fl", "id, " + nonDvFieldName, "sort", "product(-1," + nonDvFieldName + ") asc"), 
-        SolrException.ErrorCode.BAD_REQUEST);
+        "//result/doc[1]/" + type + "[@name='field(" + field + ")'][.='0.0']",
+        "//result/doc[2]/" + type + "[@name='field(" + field + ")'][.='1.1']",
+        "//result/doc[3]/" + type + "[@name='field(" + field + ")'][.='2.2']",
+        "//result/doc[10]/" + type + "[@name='field(" + field + ")'][.='9.9']");
   }
   
   private void doTestSetQueries(String fieldName, String[] values, boolean multiValued) {
@@ -1829,7 +2298,8 @@ public class TestPointFields extends SolrTestCaseJ4 {
       assertU(adoc("id", String.valueOf(i), fieldName, values[i]));
     }
     assertU(commit());
-    assertTrue(h.getCore().getLatestSchema().getField(fieldName).getType() instanceof PointField);
+    SchemaField sf = h.getCore().getLatestSchema().getField(fieldName); 
+    assertTrue(sf.getType() instanceof PointField);
     
     for (int i = 0; i < values.length; i++) {
       assertQ(req("q", "{!term f='" + fieldName + "'}" + values[i], "fl", "id," + fieldName), 
@@ -1841,6 +2311,27 @@ public class TestPointFields extends SolrTestCaseJ4 {
           "//*[@numFound='2']");
     }
     
+    assertTrue(values.length > SolrQueryParser.TERMS_QUERY_THRESHOLD);
+    int numTerms = SolrQueryParser.TERMS_QUERY_THRESHOLD + 1;
+    StringBuilder builder = new StringBuilder(fieldName + ":(");
+    for (int i = 0; i < numTerms; i++) {
+      if (sf.getType().getNumberType() == NumberType.DATE) {
+        builder.append(String.valueOf(values[i]).replace(":", "\\:") + ' ');
+      } else {
+        builder.append(String.valueOf(values[i]).replace("-", "\\-") + ' ');
+      }
+    }
+    builder.append(')');
+    if (sf.indexed()) { // SolrQueryParser should also be generating a PointInSetQuery if indexed
+      assertQ(req(CommonParams.DEBUG, CommonParams.QUERY, "q", "*:*", "fq", builder.toString(), "fl", "id," + fieldName), 
+          "//*[@numFound='" + numTerms + "']",
+          "//*[@name='parsed_filter_queries']/str[.='(" + getSetQueryToString(fieldName, values, numTerms) + ")']");
+    } else {
+      // Won't use PointInSetQuery if the field is not indexed, but it should match the same docs
+      assertQ(req(CommonParams.DEBUG, CommonParams.QUERY, "q", "*:*", "fq", builder.toString(), "fl", "id," + fieldName), 
+          "//*[@numFound='" + numTerms + "']");
+    }
+
     if (multiValued) {
       clearIndex();
       assertU(commit());
@@ -1860,6 +2351,11 @@ public class TestPointFields extends SolrTestCaseJ4 {
     }
   }
   
+  private String getSetQueryToString(String fieldName, String[] values, int numTerms) {
+    SchemaField sf = h.getCore().getLatestSchema().getField(fieldName);
+    return sf.getType().getSetQuery(null, sf, Arrays.asList(Arrays.copyOf(values, numTerms))).toString();
+  }
+
   private void doTestDoublePointFieldMultiValuedRangeFacet(String docValuesField, String nonDocValuesField) throws Exception {
     for (int i = 0; i < 10; i++) {
       assertU(adoc("id", String.valueOf(i), docValuesField, String.valueOf(i), docValuesField, String.valueOf(i + 10), 
@@ -2008,8 +2504,20 @@ public class TestPointFields extends SolrTestCaseJ4 {
         "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='-10'][.='0']");
   }
 
-
-  private void doTestDatePointFieldExactQuery(String field, String baseDate) throws Exception {
+  
+  private void doTestDatePointFieldExactQuery(final String field, final String baseDate) throws Exception {
+    doTestDatePointFieldExactQuery(field, baseDate, true);
+  }
+  
+  /**
+   * @param field the field to use for indexing and searching against
+   * @param baseDate base date value to use for indexing and searching
+   * @param searchable set to true if searches against "field" should succeed, false if field is only stored and searches should always get numFound=0
+   */
+  private void doTestDatePointFieldExactQuery(final String field, final String baseDate, final boolean searchable) throws Exception {
+    final String MATCH_ONE = "//*[@numFound='" + (searchable ? "1" : "0") + "']";
+    final String MATCH_TWO = "//*[@numFound='" + (searchable ? "2" : "0") + "']";
+    
     for (int i=0; i < 10; i++) {
       assertU(adoc("id", String.valueOf(i), field, String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, i+1)));
     }
@@ -2017,20 +2525,21 @@ public class TestPointFields extends SolrTestCaseJ4 {
     for (int i = 0; i < 10; i++) {
       String date = String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, i+1);
       assertQ(req("q", field + ":\""+date+"\"", "fl", "id, " + field),
-          "//*[@numFound='1']");
+              MATCH_ONE);
     }
 
     for (int i = 0; i < 10; i++) {
       String date1 = String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, i+1);
       String date2 = String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, ((i+1)%10 + 1));
       assertQ(req("q", field + ":\"" + date1 + "\""
-          + " OR " + field + ":\"" + date2 + "\""), "//*[@numFound='2']");
+                  + " OR " + field + ":\"" + date2 + "\""),
+              MATCH_TWO);
     }
 
     clearIndex();
     assertU(commit());
   }
-
+  
   private void doTestDatePointFieldRangeQuery(String fieldName) throws Exception {
     String baseDate = "1995-12-31T10:59:59Z";
     for (int i = 9; i >= 0; i--) {
@@ -2243,43 +2752,36 @@ public class TestPointFields extends SolrTestCaseJ4 {
         "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField + "']/lst[@name='counts']/int[@name='1994-12-31T10:59:59Z'][.='0']");
   }
 
-  private void doTestDatePointFunctionQuery(String dvFieldName, String nonDvFieldName, String type) throws Exception {
-    String baseDate = "1995-01-10T10:59:10Z";
+  private void doTestDatePointFunctionQuery(String field, String type) throws Exception {
+    final String baseDate = "1995-01-10T10:59:10Z";
+    
     for (int i = 9; i >= 0; i--) {
       String date = String.format(Locale.ROOT, "%s+%dSECONDS", baseDate, i+1);
-      assertU(adoc("id", String.valueOf(i), dvFieldName, date, nonDvFieldName, date));
+      assertU(adoc("id", String.valueOf(i), field, date));
     }
     assertU(commit());
-    assertTrue(h.getCore().getLatestSchema().getField(dvFieldName).hasDocValues());
-    assertTrue(h.getCore().getLatestSchema().getField(dvFieldName).getType() instanceof PointField);
-    assertQ(req("q", "*:*", "fl", "id, " + dvFieldName, "sort", "product(-1,ms(" + dvFieldName + ")) asc"),
+    assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof DatePointField);
+    assertQ(req("q", "*:*", "fl", "id, " + field, "sort", "product(-1,ms(" + field + ")) asc"),
         "//*[@numFound='10']",
-        "//result/doc[1]/" + type + "[@name='" + dvFieldName + "'][.='1995-01-10T10:59:20Z']",
-        "//result/doc[2]/" + type + "[@name='" + dvFieldName + "'][.='1995-01-10T10:59:19Z']",
-        "//result/doc[3]/" + type + "[@name='" + dvFieldName + "'][.='1995-01-10T10:59:18Z']",
-        "//result/doc[10]/" + type + "[@name='" + dvFieldName + "'][.='1995-01-10T10:59:11Z']");
+        "//result/doc[1]/date[@name='" + field + "'][.='1995-01-10T10:59:20Z']",
+        "//result/doc[2]/date[@name='" + field + "'][.='1995-01-10T10:59:19Z']",
+        "//result/doc[3]/date[@name='" + field + "'][.='1995-01-10T10:59:18Z']",
+        "//result/doc[10]/date[@name='" + field + "'][.='1995-01-10T10:59:11Z']");
 
-    assertQ(req("q", "*:*", "fl", "id, " + dvFieldName + ", ms(" + dvFieldName + ","+baseDate+")", "sort", "id asc"),
+    assertQ(req("q", "*:*", "fl", "id, " + field + ", ms(" + field + ","+baseDate+")", "sort", "id asc"),
         "//*[@numFound='10']",
-        "//result/doc[1]/float[@name='ms(" + dvFieldName + "," + baseDate + ")'][.='1000.0']",
-        "//result/doc[2]/float[@name='ms(" + dvFieldName + "," + baseDate + ")'][.='2000.0']",
-        "//result/doc[3]/float[@name='ms(" + dvFieldName + "," + baseDate + ")'][.='3000.0']",
-        "//result/doc[10]/float[@name='ms(" + dvFieldName + "," + baseDate + ")'][.='10000.0']");
+        "//result/doc[1]/float[@name='ms(" + field + "," + baseDate + ")'][.='1000.0']",
+        "//result/doc[2]/float[@name='ms(" + field + "," + baseDate + ")'][.='2000.0']",
+        "//result/doc[3]/float[@name='ms(" + field + "," + baseDate + ")'][.='3000.0']",
+        "//result/doc[10]/float[@name='ms(" + field + "," + baseDate + ")'][.='10000.0']");
 
-    assertQ(req("q", "*:*", "fl", "id, " + dvFieldName + ", field(" + dvFieldName + ")", "sort", "id asc"),
+    assertQ(req("q", "*:*", "fl", "id, " + field + ", field(" + field + ")", "sort", "id asc"),
         "//*[@numFound='10']",
-        "//result/doc[1]/" + type + "[@name='field(" + dvFieldName + ")'][.='1995-01-10T10:59:11Z']",
-        "//result/doc[2]/" + type + "[@name='field(" + dvFieldName + ")'][.='1995-01-10T10:59:12Z']",
-        "//result/doc[3]/" + type + "[@name='field(" + dvFieldName + ")'][.='1995-01-10T10:59:13Z']",
-        "//result/doc[10]/" + type + "[@name='field(" + dvFieldName + ")'][.='1995-01-10T10:59:20Z']");
-
-    assertFalse(h.getCore().getLatestSchema().getField(nonDvFieldName).hasDocValues());
-    assertTrue(h.getCore().getLatestSchema().getField(nonDvFieldName).getType() instanceof PointField);
+        "//result/doc[1]/date[@name='field(" + field + ")'][.='1995-01-10T10:59:11Z']",
+        "//result/doc[2]/date[@name='field(" + field + ")'][.='1995-01-10T10:59:12Z']",
+        "//result/doc[3]/date[@name='field(" + field + ")'][.='1995-01-10T10:59:13Z']",
+        "//result/doc[10]/date[@name='field(" + field + ")'][.='1995-01-10T10:59:20Z']");
 
-    assertQEx("Expecting Exception",
-        "sort param could not be parsed as a query",
-        req("q", "*:*", "fl", "id, " + nonDvFieldName, "sort", "product(-1," + nonDvFieldName + ") asc"),
-        SolrException.ErrorCode.BAD_REQUEST);
   }
 
   private void testDatePointStats(String field, String dvField, String[] dates) {
@@ -2362,6 +2864,214 @@ public class TestPointFields extends SolrTestCaseJ4 {
         "count(//result/doc[1]/arr[@name='" + field + "']/" + type + ")=0");
 
   }
+  
+  private void doTestInternals(String field, String[] values) throws IOException {
+    assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof PointField);
+    for (int i=0; i < 10; i++) {
+      assertU(adoc("id", String.valueOf(i), field, values[i]));
+    }
+    assertU(commit());
+    IndexReader ir;
+    RefCounted<SolrIndexSearcher> ref = null;
+    SchemaField sf = h.getCore().getLatestSchema().getField(field);
+    boolean ignoredField = !(sf.indexed() || sf.stored() || sf.hasDocValues());
+    try {
+      ref = h.getCore().getSearcher();
+      SolrIndexSearcher searcher = ref.get(); 
+      ir = searcher.getIndexReader();
+      // use our own SlowCompositeReaderWrapper to check DocValues on disk w/o the UninvertingReader added by SolrIndexSearcher
+      final LeafReader leafReaderForCheckingDVs = SlowCompositeReaderWrapper.wrap(searcher.getRawReader());
+      
+      if (sf.indexed()) {
+        assertEquals("Field " + field + " should have point values", 10, PointValues.size(ir, field));
+      } else {
+        assertEquals("Field " + field + " should have no point values", 0, PointValues.size(ir, field));
+      }
+      if (ignoredField) {
+        assertTrue("Field " + field + " should not have docValues",
+            DocValues.getSortedNumeric(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
+        assertTrue("Field " + field + " should not have docValues", 
+            DocValues.getNumeric(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
+        assertTrue("Field " + field + " should not have docValues", 
+            DocValues.getSorted(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
+        assertTrue("Field " + field + " should not have docValues", 
+            DocValues.getBinary(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
+      } else {
+        if (sf.hasDocValues()) {
+          if (sf.multiValued()) {
+            assertFalse("Field " + field + " should have docValues", 
+                DocValues.getSortedNumeric(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
+          } else {
+            assertFalse("Field " + field + " should have docValues", 
+                DocValues.getNumeric(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
+          }
+        } else {
+          expectThrows(IllegalStateException.class, ()->DocValues.getSortedNumeric(leafReaderForCheckingDVs, field));
+          expectThrows(IllegalStateException.class, ()->DocValues.getNumeric(leafReaderForCheckingDVs, field));
+        }
+        expectThrows(IllegalStateException.class, ()->DocValues.getSorted(leafReaderForCheckingDVs, field));
+        expectThrows(IllegalStateException.class, ()->DocValues.getBinary(leafReaderForCheckingDVs, field));
+      }
+      for (LeafReaderContext leaf : ir.leaves()) {
+        LeafReader reader = leaf.reader();
+        for (int i = 0; i < reader.numDocs(); i++) {
+          Document doc = reader.document(i);
+          if (sf.stored()) {
+            assertNotNull("Field " + field + " not found. Doc: " + doc, doc.get(field));
+          } else {
+            assertNull(doc.get(field));
+          }
+        }
+      }
+    } finally {
+      ref.decref();
+    }
+    clearIndex();
+    assertU(commit());
+  }
+
+  public void testNonReturnable() throws Exception {
+    
+    doTestReturnNonStored("foo_p_i_ni_ns", false, "42");
+    doTestReturnNonStored("foo_p_i_ni_dv_ns", true, "42");
+    doTestReturnNonStored("foo_p_i_ni_ns_mv", false, "42", "666");
+    doTestReturnNonStored("foo_p_i_ni_dv_ns_mv", true, "42", "666");
+
+    doTestReturnNonStored("foo_p_l_ni_ns", false, "3333333333");
+    doTestReturnNonStored("foo_p_l_ni_dv_ns", true, "3333333333");
+    doTestReturnNonStored("foo_p_l_ni_ns_mv", false, "3333333333", "-4444444444");
+    doTestReturnNonStored("foo_p_l_ni_dv_ns_mv", true, "3333333333", "-4444444444");
+
+    doTestReturnNonStored("foo_p_f_ni_ns", false, "42.3");
+    doTestReturnNonStored("foo_p_f_ni_dv_ns", true, "42.3");
+    doTestReturnNonStored("foo_p_f_ni_ns_mv", false, "42.3", "-66.6");
+    doTestReturnNonStored("foo_p_f_ni_dv_ns_mv", true, "42.3", "-66.6");
+    
+    doTestReturnNonStored("foo_p_d_ni_ns", false, "42.3");
+    doTestReturnNonStored("foo_p_d_ni_dv_ns", true, "42.3");
+    doTestReturnNonStored("foo_p_d_ni_ns_mv", false, "42.3", "-66.6");
+    doTestReturnNonStored("foo_p_d_ni_dv_ns_mv", true, "42.3", "-66.6");
+
+    doTestReturnNonStored("foo_p_dt_ni_ns", false, "1995-12-31T23:59:59Z");
+    doTestReturnNonStored("foo_p_dt_ni_dv_ns", true, "1995-12-31T23:59:59Z");
+    doTestReturnNonStored("foo_p_dt_ni_ns_mv", false, "1995-12-31T23:59:59Z", "2000-12-31T23:59:59Z+3DAYS");
+    doTestReturnNonStored("foo_p_dt_ni_dv_ns_mv", true, "1995-12-31T23:59:59Z", "2000-12-31T23:59:59Z+3DAYS");
+  }
+
+  public void doTestReturnNonStored(final String fieldName, boolean shouldReturnFieldIfRequested, final String... values) throws Exception {
+    final String RETURN_FIELD = "count(//doc/*[@name='" + fieldName + "'])=10";
+    final String DONT_RETURN_FIELD = "count(//doc/*[@name='" + fieldName + "'])=0";
+    assertFalse(h.getCore().getLatestSchema().getField(fieldName).stored());
+    for (int i=0; i < 10; i++) {
+      SolrInputDocument doc = sdoc("id", String.valueOf(i));
+      for (String value : values) {
+        doc.addField(fieldName, value);
+      }
+      assertU(adoc(doc));
+    }
+    assertU(commit());
+    assertQ(req("q", "*:*", "rows", "100", "fl", "id," + fieldName), 
+            "//*[@numFound='10']",
+            "count(//doc)=10", // exactly 10 docs in response
+            (shouldReturnFieldIfRequested?RETURN_FIELD:DONT_RETURN_FIELD)); // field appears in docs only when docValues can return it
+
+    assertQ(req("q", "*:*", "rows", "100", "fl", "*"), 
+        "//*[@numFound='10']",
+        "count(//doc)=10", // exactly 10 docs in response
+        DONT_RETURN_FIELD); // no field in any doc other than 'id'
+
+    assertQ(req("q", "*:*", "rows", "100"), 
+        "//*[@numFound='10']",
+        "count(//doc)=10", // exactly 10 docs in response
+        DONT_RETURN_FIELD); // no field in any doc other than 'id'
+    clearIndex();
+    assertU(commit());
+  }
+
+  public void testWhiteboxCreateFields() throws Exception {
+    String[] typeNames = new String[]{"i", "l", "f", "d", "dt"};
+    String[] suffixes = new String[]{"", "_dv", "_mv", "_mv_dv", "_ni", "_ni_dv", "_ni_dv_ns", "_ni_dv_ns_mv", "_ni_mv", "_ni_mv_dv", "_ni_ns", "_ni_ns_mv", "_dv_ns", "_ni_ns_dv", "_dv_ns_mv"};
+    Class<?>[] expectedClasses = new Class[]{IntPoint.class, LongPoint.class, FloatPoint.class, DoublePoint.class, LongPoint.class};
+    
+    Date dateToTest = new Date();
+    Object[][] values = new Object[][] {
+      {42, "42"},
+      {42, "42"},
+      {42.123, "42.123"},
+      {12345.6789, "12345.6789"},
+      {dateToTest, new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.ROOT).format(dateToTest), "NOW"} // "NOW" won't be equal to the other dates
+    };
+    
+    Set<String> typesTested = new HashSet<>();
+    for (int i = 0; i < typeNames.length; i++) {
+      for (String suffix:suffixes) {
+        doWhiteboxCreateFields("whitebox_p_" + typeNames[i] + suffix, expectedClasses[i], values[i]);
+        typesTested.add("*_p_" + typeNames[i] + suffix);
+      }
+    }
+    Set<String> typesToTest = new HashSet<>();
+    for (DynamicField dynField:h.getCore().getLatestSchema().getDynamicFields()) {
+      if (dynField.getPrototype().getType() instanceof PointField) {
+        typesToTest.add(dynField.getRegex());
+      }
+    }
+    assertEquals("Missing types in the test", typesTested, typesToTest);
+  }
+  
+  /** 
+   * Calls {@link #callAndCheckCreateFields} on each of the specified values.
+   * This is a convenience method for testing the same field name with multiple inputs.
+   *
+   * @see #callAndCheckCreateFields
+   */
+  private void doWhiteboxCreateFields(final String fieldName, final Class<?> pointType, final Object... values) throws Exception {
+    
+    for (Object value : values) {
+      // ideally we should require that all input values be different forms of the same logical value
+      // (i.e. '"42"' vs 'new Integer(42)') and assert that each produces an equivalent list of IndexableField objects
+      // but that doesn't seem to work -- it appears not all IndexableField classes override Object.equals?
+      final List<IndexableField> result = callAndCheckCreateFields(fieldName, pointType, value);
+      assertNotNull(value + " => null", result);
+    }
+  }
+
+
+  /** 
+   * Calls {@link SchemaField#createFields} on the specified value for the specified field name, and asserts 
+   * that the results match the SchemaField properties, with an additional check that the <code>pointType</code>
+   * is included if and only if the SchemaField is "indexed" 
+   */
+  private List<IndexableField> callAndCheckCreateFields(final String fieldName, final Class<?> pointType, final Object value) throws Exception {
+    final SchemaField sf = h.getCore().getLatestSchema().getField(fieldName);
+    final List<IndexableField> results = sf.createFields(value);
+    final Set<IndexableField> resultSet = new LinkedHashSet<>(results);
+    assertEquals("duplicates found in results? " + results.toString(),
+                 results.size(), resultSet.size());
+
+    final Set<Class<?>> resultClasses = new HashSet<>();
+    for (IndexableField f : results) {
+      resultClasses.add(f.getClass());
+      
+      if (!sf.hasDocValues() ) {
+        assertFalse(f.toString(),
+                    (f instanceof NumericDocValuesField) ||
+                    (f instanceof SortedNumericDocValuesField));
+      }
+    }
+    assertEquals(fieldName + " stored? Result Fields: " + Arrays.toString(results.toArray()),
+                 sf.stored(), resultClasses.contains(StoredField.class));
+    assertEquals(fieldName + " indexed? Result Fields: " + Arrays.toString(results.toArray()),
+                 sf.indexed(), resultClasses.contains(pointType));
+    if (sf.multiValued()) {
+      assertEquals(fieldName + " docvalues? Result Fields: " + Arrays.toString(results.toArray()),
+                   sf.hasDocValues(), resultClasses.contains(SortedNumericDocValuesField.class));
+    } else {
+      assertEquals(fieldName + " docvalues? Result Fields: " + Arrays.toString(results.toArray()),
+                   sf.hasDocValues(), resultClasses.contains(NumericDocValuesField.class));
+    }
+
+    return results;
+  }
 
 
 }
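
For readers who want to see concretely what the assertions above expect, here is a
minimal plain-Lucene sketch (the field name and the wrapper class are hypothetical,
not part of this patch) of the IndexableField instances that createFields() should
emit for a single-valued int point field that is indexed, stored, and has docValues:

  import org.apache.lucene.document.Document;
  import org.apache.lucene.document.IntPoint;
  import org.apache.lucene.document.NumericDocValuesField;
  import org.apache.lucene.document.StoredField;

  public class PointFieldsSketch {
    public static void main(String[] args) {
      Document doc = new Document();
      doc.add(new IntPoint("whitebox_p_i_dv", 42));               // indexed   -> IntPoint
      doc.add(new StoredField("whitebox_p_i_dv", 42));            // stored    -> StoredField
      doc.add(new NumericDocValuesField("whitebox_p_i_dv", 42L)); // docValues -> NumericDocValuesField
      System.out.println(doc.getFields());
    }
  }

A multi-valued field would use SortedNumericDocValuesField instead, which is exactly
the branch callAndCheckCreateFields asserts on above.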

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java b/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java
index 48a3f22..136d40e 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java
@@ -107,13 +107,14 @@ public class TestUseDocValuesAsStored extends AbstractBadConfigTestBase {
   }
 
   @After
-  private void afterClass() throws Exception {
+  private void afterTest() throws Exception {
+    clearIndex();
+    commit();
     deleteCore();
     System.clearProperty("managed.schema.mutable");
     System.clearProperty("enable.update.log");
   }
 
-
   public String getCoreName() {
     return "basic";
   }
@@ -225,7 +226,6 @@ public class TestUseDocValuesAsStored extends AbstractBadConfigTestBase {
 
   @Test
   public void testMultipleSearchResults() throws Exception {
-
     // Three documents with different numbers of values for a field
     assertU(adoc("id", "myid1", "test_is_dvo", "101", "test_is_dvo", "102", "test_is_dvo", "103"));
     assertU(adoc("id", "myid2", "test_is_dvo", "201", "test_is_dvo", "202"));
@@ -250,6 +250,34 @@ public class TestUseDocValuesAsStored extends AbstractBadConfigTestBase {
             + "{'id':'myid6','test_s_dvo':'hello'}"
             + "]");
   }
+  
+  @Test
+  public void testUseDocValuesAsStoredFalse() throws Exception {
+    SchemaField sf = h.getCore().getLatestSchema().getField("nonstored_dv_str");
+    assertNotNull(sf);
+    assertTrue(sf.hasDocValues());
+    assertFalse(sf.useDocValuesAsStored());
+    assertFalse(sf.stored());
+    assertU(adoc("id", "myid", "nonstored_dv_str", "dont see me"));
+    assertU(commit());
+    
+    assertJQ(req("q", "id:myid"),
+        "/response/docs==["
+            + "{'id':'myid'}"
+            + "]");
+    assertJQ(req("q", "id:myid", "fl", "*"),
+        "/response/docs==["
+            + "{'id':'myid'}"
+            + "]");
+    assertJQ(req("q", "id:myid", "fl", "id,nonstored_dv_*"),
+        "/response/docs==["
+            + "{'id':'myid'}"
+            + "]");
+    assertJQ(req("q", "id:myid", "fl", "id,nonstored_dv_str"),
+        "/response/docs==["
+            + "{'id':'myid','nonstored_dv_str':'dont see me'}"
+            + "]");
+  }
 
   public void testManagedSchema() throws Exception {
     IndexSchema oldSchema = h.getCore().getLatestSchema();
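
The four assertJQ calls above pin down the behavior: a field with
useDocValuesAsStored=false is invisible to fl=*, to glob patterns, and to the default
field list, and is only materialized from docValues when named explicitly. A minimal
SolrJ sketch of the same check (the URL and core name are assumptions for illustration):

  import org.apache.solr.client.solrj.SolrClient;
  import org.apache.solr.client.solrj.SolrQuery;
  import org.apache.solr.client.solrj.impl.HttpSolrClient;
  import org.apache.solr.client.solrj.response.QueryResponse;

  public class DvAsStoredSketch {
    public static void main(String[] args) throws Exception {
      try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/basic").build()) {
        SolrQuery q = new SolrQuery("id:myid");
        q.setFields("id", "nonstored_dv_str"); // named explicitly: value comes from docValues
        QueryResponse rsp = client.query(q);
        System.out.println(rsp.getResults());  // with fl=* or fl omitted, the field stays hidden
      }
    }
  }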

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/MockSearchComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/MockSearchComponent.java b/solr/core/src/test/org/apache/solr/search/MockSearchComponent.java
index 1539dfd..874b21a 100644
--- a/solr/core/src/test/org/apache/solr/search/MockSearchComponent.java
+++ b/solr/core/src/test/org/apache/solr/search/MockSearchComponent.java
@@ -46,10 +46,4 @@ public class MockSearchComponent extends SearchComponent {
   public String getDescription() {
     return "Mock search component for tests";
   }
-
-  @Override
-  public String getSource() {
-    return "";
-  }
-  
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java b/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java
index a9b5c6e..22d7586 100644
--- a/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java
+++ b/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java
@@ -1099,7 +1099,8 @@ public class QueryEqualityTest extends SolrTestCaseJ4 {
     assertFuncEquals("agg_hll(foo_i)", "agg_hll(foo_i)");
     assertFuncEquals("agg_sumsq(foo_i)", "agg_sumsq(foo_i)");
     assertFuncEquals("agg_percentile(foo_i,50)", "agg_percentile(foo_i,50)");
-    // assertFuncEquals("agg_stdev(foo_i)", "agg_stdev(foo_i)");
+    assertFuncEquals("agg_variance(foo_i)", "agg_variance(foo_i)");
+    assertFuncEquals("agg_stddev(foo_i)", "agg_stddev(foo_i)");
     // assertFuncEquals("agg_multistat(foo_i)", "agg_multistat(foo_i)");
   }
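
The two newly enabled equality checks correspond to the stddev() and variance()
JSON-facet aggregations. A hedged SolrJ sketch of invoking them (the collection name
and the foo_i field are assumptions for illustration, not part of the patch):

  import org.apache.solr.client.solrj.SolrClient;
  import org.apache.solr.client.solrj.SolrQuery;
  import org.apache.solr.client.solrj.impl.HttpSolrClient;

  public class StddevFacetSketch {
    public static void main(String[] args) throws Exception {
      try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
        SolrQuery q = new SolrQuery("*:*");
        q.setRows(0);
        q.add("json.facet", "{ sd : 'stddev(foo_i)', var : 'variance(foo_i)' }");
        System.out.println(client.query(q).getResponse().get("facets"));
      }
    }
  }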
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java b/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
index c4d8bec..6c7b0cf 100644
--- a/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
+++ b/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
@@ -93,6 +93,7 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
     assertU(adoc("id", "70", "text_sw", "hair"));
     assertU(adoc("id", "71", "text_sw", "ties"));
     assertU(adoc("id", "72", "text_sw", "wifi ATM"));
+    assertU(adoc("id", "73", "shingle23", "A B X D E"));
     assertU(commit());
   }
 
@@ -1946,4 +1947,21 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
       }
     }
   }
+
+  @Test
+  public void testShingleQueries() throws Exception {
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    params.add("sow", "false");
+    params.add("defType", "edismax");
+
+    try (SolrQueryRequest req = req(params)) {
+      QParser qParser = QParser.getParser("shingle23:(A B C)", req);
+      Query q = qParser.getQuery();
+      assertEquals("Synonym(shingle23:A_B shingle23:A_B_C) shingle23:B_C", q.toString());
+    }
+
+    assertJQ(req("df", "shingle23", "q", "A B C", "sow", "false")
+        , "/response/numFound==1"
+    );
+  }
 }
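
The asserted query implies the shingle23 field type emits 2- and 3-gram shingles with
an underscore separator and no unigrams; that is an inference from the test, the field
type itself is not shown in this diff. A standalone Lucene analysis sketch that
produces exactly the three terms in the asserted query:

  import java.io.StringReader;
  import org.apache.lucene.analysis.core.WhitespaceTokenizer;
  import org.apache.lucene.analysis.shingle.ShingleFilter;
  import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

  public class ShingleSketch {
    public static void main(String[] args) throws Exception {
      WhitespaceTokenizer tokens = new WhitespaceTokenizer();
      tokens.setReader(new StringReader("A B C"));
      ShingleFilter shingles = new ShingleFilter(tokens, 2, 3); // 2- and 3-grams, like "shingle23"
      shingles.setTokenSeparator("_");
      shingles.setOutputUnigrams(false);
      CharTermAttribute term = shingles.addAttribute(CharTermAttribute.class);
      shingles.reset();
      while (shingles.incrementToken()) {
        System.out.println(term.toString()); // prints A_B, A_B_C, B_C
      }
      shingles.end();
      shingles.close();
    }
  }

With sow=false the query parser analyzes the whole phrase, so "A B C" against this
field becomes the graph query asserted above rather than three term queries.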


[12/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
index ed85604..049d228 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
@@ -39,7 +39,7 @@ import org.apache.solr.common.util.SuppressForbidden;
 import org.apache.solr.core.DirectoryFactory;
 import org.apache.solr.core.DirectoryFactory.DirContext;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.schema.IndexSchema;
 import org.slf4j.Logger;
@@ -126,7 +126,7 @@ public class SolrIndexWriter extends IndexWriter {
     infoStream = getConfig().getInfoStream();
     this.directory = directory;
     numOpens.incrementAndGet();
-    SolrMetricManager metricManager = core.getCoreDescriptor().getCoreContainer().getMetricManager();
+    SolrMetricManager metricManager = core.getCoreContainer().getMetricManager();
     String registry = core.getCoreMetricManager().getRegistryName();
     if (config.metricsInfo != null && config.metricsInfo.initArgs != null) {
       Object v = config.metricsInfo.initArgs.get("majorMergeDocs");
@@ -151,20 +151,20 @@ public class SolrIndexWriter extends IndexWriter {
       }
       if (mergeDetails) {
         mergeTotals = true; // override
-        majorMergedDocs = metricManager.meter(registry, "docs", SolrInfoMBean.Category.INDEX.toString(), "merge", "major");
-        majorDeletedDocs = metricManager.meter(registry, "deletedDocs", SolrInfoMBean.Category.INDEX.toString(), "merge", "major");
+        majorMergedDocs = metricManager.meter(null, registry, "docs", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
+        majorDeletedDocs = metricManager.meter(null, registry, "deletedDocs", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
       }
       if (mergeTotals) {
-        minorMerge = metricManager.timer(registry, "minor", SolrInfoMBean.Category.INDEX.toString(), "merge");
-        majorMerge = metricManager.timer(registry, "major", SolrInfoMBean.Category.INDEX.toString(), "merge");
-        mergeErrors = metricManager.counter(registry, "errors", SolrInfoMBean.Category.INDEX.toString(), "merge");
-        metricManager.registerGauge(registry, () -> runningMajorMerges.get(), true, "running", SolrInfoMBean.Category.INDEX.toString(), "merge", "major");
-        metricManager.registerGauge(registry, () -> runningMinorMerges.get(), true, "running", SolrInfoMBean.Category.INDEX.toString(), "merge", "minor");
-        metricManager.registerGauge(registry, () -> runningMajorMergesDocs.get(), true, "running.docs", SolrInfoMBean.Category.INDEX.toString(), "merge", "major");
-        metricManager.registerGauge(registry, () -> runningMinorMergesDocs.get(), true, "running.docs", SolrInfoMBean.Category.INDEX.toString(), "merge", "minor");
-        metricManager.registerGauge(registry, () -> runningMajorMergesSegments.get(), true, "running.segments", SolrInfoMBean.Category.INDEX.toString(), "merge", "major");
-        metricManager.registerGauge(registry, () -> runningMinorMergesSegments.get(), true, "running.segments", SolrInfoMBean.Category.INDEX.toString(), "merge", "minor");
-        flushMeter = metricManager.meter(registry, "flush", SolrInfoMBean.Category.INDEX.toString());
+        minorMerge = metricManager.timer(null, registry, "minor", SolrInfoBean.Category.INDEX.toString(), "merge");
+        majorMerge = metricManager.timer(null, registry, "major", SolrInfoBean.Category.INDEX.toString(), "merge");
+        mergeErrors = metricManager.counter(null, registry, "errors", SolrInfoBean.Category.INDEX.toString(), "merge");
+        metricManager.registerGauge(null, registry, () -> runningMajorMerges.get(), true, "running", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
+        metricManager.registerGauge(null, registry, () -> runningMinorMerges.get(), true, "running", SolrInfoBean.Category.INDEX.toString(), "merge", "minor");
+        metricManager.registerGauge(null, registry, () -> runningMajorMergesDocs.get(), true, "running.docs", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
+        metricManager.registerGauge(null, registry, () -> runningMinorMergesDocs.get(), true, "running.docs", SolrInfoBean.Category.INDEX.toString(), "merge", "minor");
+        metricManager.registerGauge(null, registry, () -> runningMajorMergesSegments.get(), true, "running.segments", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
+        metricManager.registerGauge(null, registry, () -> runningMinorMergesSegments.get(), true, "running.segments", SolrInfoBean.Category.INDEX.toString(), "merge", "minor");
+        flushMeter = metricManager.meter(null, registry, "flush", SolrInfoBean.Category.INDEX.toString());
       }
     }
   }
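
The SolrMetricManager calls above (the leading null is the optional owning
SolrInfoBean) delegate to a Dropwizard MetricRegistry. A minimal standalone sketch of
the equivalent raw Dropwizard registrations, with illustrative names only:

  import java.util.concurrent.atomic.AtomicInteger;
  import com.codahale.metrics.Gauge;
  import com.codahale.metrics.Meter;
  import com.codahale.metrics.MetricRegistry;
  import com.codahale.metrics.Timer;

  public class MergeMetricsSketch {
    public static void main(String[] args) {
      MetricRegistry registry = new MetricRegistry();
      AtomicInteger runningMajorMerges = new AtomicInteger();

      Timer majorMerge = registry.timer(MetricRegistry.name("INDEX", "merge", "major"));
      Meter flushMeter = registry.meter(MetricRegistry.name("INDEX", "flush"));
      registry.register(MetricRegistry.name("INDEX", "merge", "major", "running"),
          (Gauge<Integer>) runningMajorMerges::get);

      try (Timer.Context ignored = majorMerge.time()) { // time one simulated merge
        runningMajorMerges.incrementAndGet();
        runningMajorMerges.decrementAndGet();
      }
      flushMeter.mark();
      System.out.println("timed merges: " + majorMerge.getCount());
    }
  }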

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/UpdateHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateHandler.java b/solr/core/src/java/org/apache/solr/update/UpdateHandler.java
index cbfb0d5..49d2664 100644
--- a/solr/core/src/java/org/apache/solr/update/UpdateHandler.java
+++ b/solr/core/src/java/org/apache/solr/update/UpdateHandler.java
@@ -19,14 +19,17 @@ package org.apache.solr.update;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
+import java.util.HashSet;
+import java.util.Set;
 import java.util.Vector;
 
+import com.codahale.metrics.MetricRegistry;
 import org.apache.solr.core.DirectoryFactory;
 import org.apache.solr.core.HdfsDirectoryFactory;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrEventListener;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.util.plugin.SolrCoreAware;
@@ -41,7 +44,7 @@ import org.slf4j.LoggerFactory;
  * @since solr 0.9
  */
 
-public abstract class UpdateHandler implements SolrInfoMBean {
+public abstract class UpdateHandler implements SolrInfoBean {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   protected final SolrCore core;
@@ -55,6 +58,9 @@ public abstract class UpdateHandler implements SolrInfoMBean {
 
   protected final UpdateLog ulog;
 
+  protected Set<String> metricNames = new HashSet<>();
+  protected MetricRegistry registry;
+
   private void parseEventListeners() {
     final Class<SolrEventListener> clazz = SolrEventListener.class;
     final String label = "Event Listener";
@@ -221,4 +227,12 @@ public abstract class UpdateHandler implements SolrInfoMBean {
   public Category getCategory() {
     return Category.UPDATE;
   }
+  @Override
+  public Set<String> getMetricNames() {
+    return metricNames;
+  }
+  @Override
+  public MetricRegistry getMetricRegistry() {
+    return registry;
+  }
 }


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/UpdateLog.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateLog.java b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
index 84a2005..c50add4 100644
--- a/solr/core/src/java/org/apache/solr/update/UpdateLog.java
+++ b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
@@ -57,7 +57,7 @@ import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
 import org.apache.solr.request.LocalSolrQueryRequest;
@@ -403,7 +403,7 @@ public static final int VERSION_IDX = 1;
       }
 
     }
-    core.getCoreMetricManager().registerMetricProducer(SolrInfoMBean.Category.TLOG.toString(), this);
+    core.getCoreMetricManager().registerMetricProducer(SolrInfoBean.Category.TLOG.toString(), this);
   }
 
   @Override
@@ -422,12 +422,12 @@ public static final int VERSION_IDX = 1;
       }
     };
 
-    manager.registerGauge(registry, bufferedOpsGauge, true, "ops", scope, "buffered");
-    manager.registerGauge(registry, () -> logs.size(), true, "logs", scope, "replay", "remaining");
-    manager.registerGauge(registry, () -> getTotalLogsSize(), true, "bytes", scope, "replay", "remaining");
-    applyingBufferedOpsMeter = manager.meter(registry, "ops", scope, "applyingBuffered");
-    replayOpsMeter = manager.meter(registry, "ops", scope, "replay");
-    manager.registerGauge(registry, () -> state.getValue(), true, "state", scope);
+    manager.registerGauge(null, registry, bufferedOpsGauge, true, "ops", scope, "buffered");
+    manager.registerGauge(null, registry, () -> logs.size(), true, "logs", scope, "replay", "remaining");
+    manager.registerGauge(null, registry, () -> getTotalLogsSize(), true, "bytes", scope, "replay", "remaining");
+    applyingBufferedOpsMeter = manager.meter(null, registry, "ops", scope, "applyingBuffered");
+    replayOpsMeter = manager.meter(null, registry, "ops", scope, "replay");
+    manager.registerGauge(null, registry, () -> state.getValue(), true, "state", scope);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java b/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java
index 9d4eb7d..20132e1 100644
--- a/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java
+++ b/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java
@@ -17,10 +17,12 @@
 package org.apache.solr.update;
 
 import java.lang.invoke.MethodHandles;
-import java.net.URL;
+import java.util.HashSet;
+import java.util.Set;
 import java.util.concurrent.ExecutorService;
+import java.util.concurrent.ThreadFactory;
 
-import com.codahale.metrics.InstrumentedExecutorService;
+import com.codahale.metrics.MetricRegistry;
 import org.apache.http.client.HttpClient;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
@@ -29,20 +31,20 @@ import org.apache.solr.cloud.RecoveryStrategy;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.ExecutorUtil;
-import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
 import org.apache.solr.util.stats.HttpClientMetricNameStrategy;
 import org.apache.solr.util.stats.InstrumentedHttpRequestExecutor;
 import org.apache.solr.util.stats.InstrumentedPoolingHttpClientConnectionManager;
+import org.apache.solr.util.stats.MetricUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.util.stats.InstrumentedHttpRequestExecutor.KNOWN_METRIC_NAME_STRATEGIES;
 
-public class UpdateShardHandler implements SolrMetricProducer, SolrInfoMBean {
+public class UpdateShardHandler implements SolrMetricProducer, SolrInfoBean {
   
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
@@ -56,8 +58,7 @@ public class UpdateShardHandler implements SolrMetricProducer, SolrInfoMBean {
   private ExecutorService updateExecutor = ExecutorUtil.newMDCAwareCachedThreadPool(
       new SolrjNamedThreadFactory("updateExecutor"));
   
-  private ExecutorService recoveryExecutor = ExecutorUtil.newMDCAwareCachedThreadPool(
-      new SolrjNamedThreadFactory("recoveryExecutor"));
+  private ExecutorService recoveryExecutor;
   
   private final CloseableHttpClient client;
 
@@ -65,6 +66,9 @@ public class UpdateShardHandler implements SolrMetricProducer, SolrInfoMBean {
 
   private final InstrumentedHttpRequestExecutor httpRequestExecutor;
 
+  private final Set<String> metricNames = new HashSet<>();
+  private MetricRegistry registry;
+
   public UpdateShardHandler(UpdateShardHandlerConfig cfg) {
     clientConnectionManager = new InstrumentedPoolingHttpClientConnectionManager(HttpClientUtil.getSchemaRegisteryProvider().getSchemaRegistry());
     if (cfg != null ) {
@@ -96,6 +100,15 @@ public class UpdateShardHandler implements SolrMetricProducer, SolrInfoMBean {
       clientParams.set(HttpClientUtil.PROP_MAX_CONNECTIONS_PER_HOST, cfg.getMaxUpdateConnectionsPerHost());
     }
     log.debug("Created UpdateShardHandler HTTP client with params: {}", clientParams);
+
+    ThreadFactory recoveryThreadFactory = new SolrjNamedThreadFactory("recoveryExecutor");
+    if (cfg != null && cfg.getMaxRecoveryThreads() > 0) {
+      log.debug("Creating recoveryExecutor with pool size {}", cfg.getMaxRecoveryThreads());
+      recoveryExecutor = ExecutorUtil.newMDCAwareFixedThreadPool(cfg.getMaxRecoveryThreads(), recoveryThreadFactory);
+    } else {
+      log.debug("Creating recoveryExecutor with unbounded pool");
+      recoveryExecutor = ExecutorUtil.newMDCAwareCachedThreadPool(recoveryThreadFactory);
+    }
   }
 
   @Override
@@ -104,20 +117,14 @@ public class UpdateShardHandler implements SolrMetricProducer, SolrInfoMBean {
   }
 
   @Override
-  public String getVersion() {
-    return getClass().getPackage().getSpecificationVersion();
-  }
-
-  @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    registry = manager.registry(registryName);
     String expandedScope = SolrMetricManager.mkName(scope, getCategory().name());
-    clientConnectionManager.initializeMetrics(manager, registry, expandedScope);
-    httpRequestExecutor.initializeMetrics(manager, registry, expandedScope);
-    updateExecutor = new InstrumentedExecutorService(updateExecutor,
-        manager.registry(registry),
+    clientConnectionManager.initializeMetrics(manager, registryName, expandedScope);
+    httpRequestExecutor.initializeMetrics(manager, registryName, expandedScope);
+    updateExecutor = MetricUtils.instrumentedExecutorService(updateExecutor, this, registry,
         SolrMetricManager.mkName("updateExecutor", expandedScope, "threadPool"));
-    recoveryExecutor = new InstrumentedExecutorService(recoveryExecutor,
-        manager.registry(registry),
+    recoveryExecutor = MetricUtils.instrumentedExecutorService(recoveryExecutor, this, registry,
         SolrMetricManager.mkName("recoveryExecutor", expandedScope, "threadPool"));
   }
 
@@ -132,18 +139,13 @@ public class UpdateShardHandler implements SolrMetricProducer, SolrInfoMBean {
   }
 
   @Override
-  public String getSource() {
-    return null;
-  }
-
-  @Override
-  public URL[] getDocs() {
-    return new URL[0];
+  public Set<String> getMetricNames() {
+    return metricNames;
   }
 
   @Override
-  public NamedList getStatistics() {
-    return null;
+  public MetricRegistry getMetricRegistry() {
+    return registry;
   }
 
   public HttpClient getHttpClient() {
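
The new MetricUtils.instrumentedExecutorService helper replaces the direct
InstrumentedExecutorService construction and presumably also records the generated
metric names on the owning bean. The underlying mechanism is still Dropwizard's
wrapper; a standalone sketch of the raw wrapping, as an assumption-level illustration:

  import java.util.concurrent.ExecutorService;
  import java.util.concurrent.Executors;
  import java.util.concurrent.TimeUnit;
  import com.codahale.metrics.InstrumentedExecutorService;
  import com.codahale.metrics.MetricRegistry;

  public class InstrumentedPoolSketch {
    public static void main(String[] args) throws Exception {
      MetricRegistry registry = new MetricRegistry();
      // Wrapping adds submitted/completed meters, a running counter, and a
      // task-duration timer under the given name prefix.
      ExecutorService recoveryExecutor = new InstrumentedExecutorService(
          Executors.newCachedThreadPool(), registry, "recoveryExecutor");
      recoveryExecutor.submit(() -> System.out.println("recovery task"));
      recoveryExecutor.shutdown();
      recoveryExecutor.awaitTermination(5, TimeUnit.SECONDS);
      System.out.println(registry.getMeters().keySet()); // e.g. recoveryExecutor.submitted, ...
    }
  }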

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/UpdateShardHandlerConfig.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateShardHandlerConfig.java b/solr/core/src/java/org/apache/solr/update/UpdateShardHandlerConfig.java
index d31ce50..a8b6953 100644
--- a/solr/core/src/java/org/apache/solr/update/UpdateShardHandlerConfig.java
+++ b/solr/core/src/java/org/apache/solr/update/UpdateShardHandlerConfig.java
@@ -23,11 +23,12 @@ public class UpdateShardHandlerConfig {
   public static final int DEFAULT_MAXUPDATECONNECTIONS = 100000;
   public static final int DEFAULT_MAXUPDATECONNECTIONSPERHOST = 100000;
   public static final String DEFAULT_METRICNAMESTRATEGY = "queryLessURLAndMethod";
+  public static final int DEFAULT_MAXRECOVERYTHREADS = -1;
 
   public static final UpdateShardHandlerConfig DEFAULT
       = new UpdateShardHandlerConfig(DEFAULT_MAXUPDATECONNECTIONS, DEFAULT_MAXUPDATECONNECTIONSPERHOST,
                                      DEFAULT_DISTRIBUPDATESOTIMEOUT, DEFAULT_DISTRIBUPDATECONNTIMEOUT,
-                                      DEFAULT_METRICNAMESTRATEGY);
+                                      DEFAULT_METRICNAMESTRATEGY, DEFAULT_MAXRECOVERYTHREADS);
 
   private final int maxUpdateConnections;
 
@@ -39,13 +40,16 @@ public class UpdateShardHandlerConfig {
 
   private final String metricNameStrategy;
 
+  private final int maxRecoveryThreads;
+
   public UpdateShardHandlerConfig(int maxUpdateConnections, int maxUpdateConnectionsPerHost, int distributedSocketTimeout, int distributedConnectionTimeout,
-                                  String metricNameStrategy) {
+                                  String metricNameStrategy, int maxRecoveryThreads) {
     this.maxUpdateConnections = maxUpdateConnections;
     this.maxUpdateConnectionsPerHost = maxUpdateConnectionsPerHost;
     this.distributedSocketTimeout = distributedSocketTimeout;
     this.distributedConnectionTimeout = distributedConnectionTimeout;
     this.metricNameStrategy = metricNameStrategy;
+    this.maxRecoveryThreads = maxRecoveryThreads;
   }
 
   public int getMaxUpdateConnectionsPerHost() {
@@ -67,4 +71,8 @@ public class UpdateShardHandlerConfig {
   public String getMetricNameStrategy() {
     return metricNameStrategy;
   }
+
+  public int getMaxRecoveryThreads() {
+    return maxRecoveryThreads;
+  }
 }
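
A distilled sketch of the executor selection this config option drives in
UpdateShardHandler above; plain java.util.concurrent pools stand in here for Solr's
MDC-aware ExecutorUtil variants:

  import java.util.concurrent.ExecutorService;
  import java.util.concurrent.Executors;

  public class RecoveryExecutorSketch {
    // maxRecoveryThreads <= 0 (the DEFAULT_MAXRECOVERYTHREADS of -1) keeps the old
    // unbounded cached pool; a positive value caps concurrent recoveries.
    static ExecutorService newRecoveryExecutor(int maxRecoveryThreads) {
      return maxRecoveryThreads > 0
          ? Executors.newFixedThreadPool(maxRecoveryThreads)
          : Executors.newCachedThreadPool();
    }

    public static void main(String[] args) {
      ExecutorService exec = newRecoveryExecutor(-1);
      exec.submit(() -> System.out.println("recovering..."));
      exec.shutdown();
    }
  }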

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
index 55d1fc8..cb1b2fb 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
@@ -73,7 +73,7 @@ import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.Hash;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.Utils;
-import org.apache.solr.core.CoreDescriptor;
+import org.apache.solr.core.CoreContainer;
 import org.apache.solr.handler.component.RealTimeGetComponent;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
@@ -311,17 +311,16 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
     // this should always be used - see filterParams
     DistributedUpdateProcessorFactory.addParamToDistributedRequestWhitelist
       (this.req, UpdateParams.UPDATE_CHAIN, TEST_DISTRIB_SKIP_SERVERS, CommonParams.VERSION_FIELD);
-    
-    CoreDescriptor coreDesc = req.getCore().getCoreDescriptor();
-    
-    this.zkEnabled  = coreDesc.getCoreContainer().isZooKeeperAware();
-    zkController = req.getCore().getCoreDescriptor().getCoreContainer().getZkController();
+
+    CoreContainer cc = req.getCore().getCoreContainer();
+
+    this.zkEnabled  = cc.isZooKeeperAware();
+    zkController = cc.getZkController();
     if (zkEnabled) {
-      cmdDistrib = new SolrCmdDistributor(coreDesc.getCoreContainer().getUpdateShardHandler());
+      cmdDistrib = new SolrCmdDistributor(cc.getUpdateShardHandler());
     }
     //this.rsp = reqInfo != null ? reqInfo.getRsp() : null;
-
-    cloudDesc = coreDesc.getCloudDescriptor();
+    cloudDesc = req.getCore().getCoreDescriptor().getCloudDescriptor();
     
     if (cloudDesc != null) {
       collection = cloudDesc.getCollectionName();
@@ -597,7 +596,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
                           ZkStateReader.COLLECTION_PROP, collection,
                           ZkStateReader.SHARD_ID_PROP, myShardId,
                           "routeKey", routeKey + "!");
-                      SolrZkClient zkClient = req.getCore().getCoreDescriptor().getCoreContainer().getZkController().getZkClient();
+                      SolrZkClient zkClient = req.getCore().getCoreContainer().getZkController().getZkClient();
                       DistributedQueue queue = Overseer.getStateUpdateQueue(zkClient);
                       queue.offer(Utils.toJSON(map));
                     } catch (KeeperException e) {
@@ -948,7 +947,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
             Throwable rootCause = SolrException.getRootCause(error.e);
             log.error("Setting up to try to start recovery on replica {}", replicaUrl, rootCause);
             zkController.ensureReplicaInLeaderInitiatedRecovery(
-                req.getCore().getCoreDescriptor().getCoreContainer(),
+                req.getCore().getCoreContainer(),
                 collection,
                 shardId,
                 stdNode.getNodeProps(),
@@ -1302,7 +1301,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
    */
   private UpdateCommand fetchFullUpdateFromLeader(AddUpdateCommand inplaceAdd, long versionOnUpdate) throws IOException {
     String id = inplaceAdd.getPrintableId();
-    UpdateShardHandler updateShardHandler = inplaceAdd.getReq().getCore().getCoreDescriptor().getCoreContainer().getUpdateShardHandler();
+    UpdateShardHandler updateShardHandler = inplaceAdd.getReq().getCore().getCoreContainer().getUpdateShardHandler();
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set(DISTRIB, false);
     params.set("getInputDocument", id);
@@ -1742,7 +1741,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
     // Streaming updates can delay shutdown and cause big update reorderings (new streams can't be
     // initiated, but existing streams carry on).  This is why we check if the CC is shutdown.
     // See SOLR-8203 and loop HdfsChaosMonkeyNothingIsSafeTest (and check for inconsistent shards) to test.
-    if (req.getCore().getCoreDescriptor().getCoreContainer().isShutDown()) {
+    if (req.getCore().getCoreContainer().isShutDown()) {
       throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "CoreContainer is shutting down.");
     }
 
@@ -1960,7 +1959,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
 
   
   private List<Node> getCollectionUrls(SolrQueryRequest req, String collection) {
-    ClusterState clusterState = req.getCore().getCoreDescriptor()
+    ClusterState clusterState = req.getCore()
         .getCoreContainer().getZkController().getClusterState();
     Map<String,Slice> slices = clusterState.getSlicesMap(collection);
     if (slices == null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactory.java
index 332dba6..c4234cb 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactory.java
@@ -454,7 +454,7 @@ public final class DocExpirationUpdateProcessorFactory
    * </p>
    */
   private boolean iAmInChargeOfPeriodicDeletes() {
-    ZkController zk = core.getCoreDescriptor().getCoreContainer().getZkController();
+    ZkController zk = core.getCoreContainer().getZkController();
 
     if (null == zk) return true;
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
index 9c1a565..2f4de12 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/TolerantUpdateProcessor.java
@@ -134,7 +134,7 @@ public class TolerantUpdateProcessor extends UpdateRequestProcessor {
     this.distribPhase = distribPhase;
     assert ! DistribPhase.FROMLEADER.equals(distribPhase);
     
-    this.zkController = this.req.getCore().getCoreDescriptor().getCoreContainer().getZkController();
+    this.zkController = this.req.getCore().getCoreContainer().getZkController();
     this.uniqueKeyField = this.req.getCore().getLatestSchema().getUniqueKeyField();
     assert null != uniqueKeyField : "Factory didn't enforce uniqueKey field?";
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/util/JmxUtil.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/JmxUtil.java b/solr/core/src/java/org/apache/solr/util/JmxUtil.java
index 02a070d..f27a55e 100644
--- a/solr/core/src/java/org/apache/solr/util/JmxUtil.java
+++ b/solr/core/src/java/org/apache/solr/util/JmxUtil.java
@@ -27,9 +27,6 @@ import java.util.List;
 
 /**
  * Utility methods to find a MBeanServer.
- *
- * This was factored out from {@link org.apache.solr.core.JmxMonitoredMap}
- * and can eventually replace the logic used there.
  */
 public final class JmxUtil {
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java b/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java
index b79ec0c..a60ada8 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java
@@ -164,7 +164,7 @@ public class SolrLogLayout extends Layout {
         sb.append(" " + core);
       }
 
-      zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
+      zkController = core.getCoreContainer().getZkController();
       if (zkController != null) {
         if (info.url == null) {
           info.url = zkController.getBaseUrl() + "/" + core.getName();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java
index 7bcabf8..58ec69e 100644
--- a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java
+++ b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java
@@ -35,10 +35,10 @@ public class InstrumentedPoolingHttpClientConnectionManager extends PoolingHttpC
 
   @Override
   public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
-    manager.registerGauge(registry, () -> getTotalStats().getAvailable(), true, SolrMetricManager.mkName("availableConnections", scope));
+    manager.registerGauge(null, registry, () -> getTotalStats().getAvailable(), true, SolrMetricManager.mkName("availableConnections", scope));
     // this acquires a lock on the connection pool; remove it if lock contention becomes a problem
-    manager.registerGauge(registry, () -> getTotalStats().getLeased(), true, SolrMetricManager.mkName("leasedConnections", scope));
-    manager.registerGauge(registry, () -> getTotalStats().getMax(), true, SolrMetricManager.mkName("maxConnections", scope));
-    manager.registerGauge(registry, () -> getTotalStats().getPending(), true, SolrMetricManager.mkName("pendingConnections", scope));
+    manager.registerGauge(null, registry, () -> getTotalStats().getLeased(), true, SolrMetricManager.mkName("leasedConnections", scope));
+    manager.registerGauge(null, registry, () -> getTotalStats().getMax(), true, SolrMetricManager.mkName("maxConnections", scope));
+    manager.registerGauge(null, registry, () -> getTotalStats().getPending(), true, SolrMetricManager.mkName("pendingConnections", scope));
   }
 }
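
For reference, a standalone sketch of what these registrations boil down to:
Dropwizard gauges that read the pool's aggregate stats lazily (registry and gauge
names here are illustrative only):

  import com.codahale.metrics.Gauge;
  import com.codahale.metrics.MetricRegistry;
  import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;

  public class PoolGaugesSketch {
    public static void main(String[] args) {
      MetricRegistry registry = new MetricRegistry();
      PoolingHttpClientConnectionManager pool = new PoolingHttpClientConnectionManager();
      // Each gauge reads the stats on demand; getTotalStats() briefly locks the
      // pool, which is the contention noted in the comment above.
      registry.register("availableConnections", (Gauge<Integer>) () -> pool.getTotalStats().getAvailable());
      registry.register("leasedConnections", (Gauge<Integer>) () -> pool.getTotalStats().getLeased());
      registry.register("pendingConnections", (Gauge<Integer>) () -> pool.getTotalStats().getPending());
      System.out.println(registry.getGauges().get("availableConnections").getValue());
    }
  }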

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
index 491932d..a190a0f 100644
--- a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
@@ -16,9 +16,18 @@
  */
 package org.apache.solr.util.stats;
 
+import java.beans.BeanInfo;
+import java.beans.IntrospectionException;
+import java.beans.Introspector;
+import java.beans.PropertyDescriptor;
 import java.lang.invoke.MethodHandles;
+import java.lang.management.OperatingSystemMXBean;
+import java.lang.management.PlatformManagedObject;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.SortedSet;
@@ -39,7 +48,7 @@ import com.codahale.metrics.Snapshot;
 import com.codahale.metrics.Timer;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.AggregateMetric;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -77,6 +86,20 @@ public class MetricUtils {
   static final String P999_MS = P999 + MS;
 
   /**
+   * This filter can limit what properties of a metric are returned.
+   */
+  public interface PropertyFilter {
+    PropertyFilter ALL = (name) -> true;
+
+    /**
+     * Return only properties that match.
+     * @param name property name
+     * @return true if this property should be returned, false otherwise.
+     */
+    boolean accept(String name);
+  }
+
+  /**
    * Adds metrics from a Timer to a NamedList, using well-known back-compat names.
    * @param lst The NamedList to add the metrics data to
    * @param timer The Timer to extract the metrics from
@@ -105,70 +128,30 @@ public class MetricUtils {
   }
 
   /**
-   * Returns a NamedList representation of the given metric registry. Only those metrics
-   * are converted to NamedList which match at least one of the given MetricFilter instances.
-   *
-   * @param registry      the {@link MetricRegistry} to be converted to NamedList
-   * @param shouldMatchFilters a list of {@link MetricFilter} instances.
-   *                           A metric must match <em>any one</em> of the filters from this list to be
-   *                           included in the output
-   * @param mustMatchFilter a {@link MetricFilter}.
-   *                        A metric <em>must</em> match this filter to be included in the output.
-   * @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
-   * @param compact use compact representation for counters and gauges.
-   * @param metadata optional metadata. If not null and not empty then this map will be added under a
-   *                 {@code _metadata_} key.
-   * @return a {@link NamedList}
-   */
-  public static NamedList toNamedList(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
-                                      MetricFilter mustMatchFilter, boolean skipHistograms,
-                                      boolean skipAggregateValues, boolean compact,
-                                      Map<String, Object> metadata) {
-    NamedList result = new SimpleOrderedMap();
-    toMaps(registry, shouldMatchFilters, mustMatchFilter, skipHistograms, skipAggregateValues, compact, (k, v) -> {
-      result.add(k, v);
-    });
-    if (metadata != null && !metadata.isEmpty()) {
-      result.add("_metadata_", metadata);
-    }
-    return result;
-  }
-
-  /**
-   * Returns a representation of the given metric registry as a list of {@link SolrInputDocument}-s.
+   * Provides a representation of the given metric registry as {@link SolrInputDocument}-s. Only those metrics
-   * are converted to NamedList which match at least one of the given MetricFilter instances.
+   * are converted which match at least one of the given MetricFilter instances.
    *
-   * @param registry      the {@link MetricRegistry} to be converted to NamedList
+   * @param registry      the {@link MetricRegistry} to be converted
    * @param shouldMatchFilters a list of {@link MetricFilter} instances.
    *                           A metric must match <em>any one</em> of the filters from this list to be
    *                           included in the output
    * @param mustMatchFilter a {@link MetricFilter}.
    *                        A metric <em>must</em> match this filter to be included in the output.
+   * @param propertyFilter limit what properties of a metric are returned
    * @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
+   * @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
    * @param compact use compact representation for counters and gauges.
    * @param metadata optional metadata. If not null and not empty then this map will be added under a
    *                 {@code _metadata_} key.
-   * @return a list of {@link SolrInputDocument}-s
+   * @param consumer consumer that accepts produced {@link SolrInputDocument}-s
    */
-  public static List<SolrInputDocument> toSolrInputDocuments(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
-                                                             MetricFilter mustMatchFilter, boolean skipHistograms,
-                                                             boolean skipAggregateValues, boolean compact,
-                                                             Map<String, Object> metadata) {
-    List<SolrInputDocument> result = new LinkedList<>();
-    toSolrInputDocuments(registry, shouldMatchFilters, mustMatchFilter, skipHistograms,
-        skipAggregateValues, compact, metadata, doc -> {
-      result.add(doc);
-    });
-    return result;
-  }
-
   public static void toSolrInputDocuments(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
-                                          MetricFilter mustMatchFilter, boolean skipHistograms,
+                                          MetricFilter mustMatchFilter, PropertyFilter propertyFilter, boolean skipHistograms,
                                           boolean skipAggregateValues, boolean compact,
                                           Map<String, Object> metadata, Consumer<SolrInputDocument> consumer) {
     boolean addMetadata = metadata != null && !metadata.isEmpty();
-    toMaps(registry, shouldMatchFilters, mustMatchFilter, skipHistograms, skipAggregateValues, compact, (k, v) -> {
+    toMaps(registry, shouldMatchFilters, mustMatchFilter, propertyFilter, skipHistograms, skipAggregateValues, compact, false, (k, v) -> {
       SolrInputDocument doc = new SolrInputDocument();
       doc.setField(METRIC_NAME, k);
       toSolrInputDocument(null, doc, v);
@@ -179,7 +162,13 @@ public class MetricUtils {
     });
   }
 
-  public static void toSolrInputDocument(String prefix, SolrInputDocument doc, Object o) {
+  /**
+   * Fill in a SolrInputDocument with values from a converted metric, recursively.
+   * @param prefix prefix to add to generated field names, or null if none.
+   * @param doc document to fill
+   * @param o an instance of converted metric, either a Map or a flat Object
+   */
+  static void toSolrInputDocument(String prefix, SolrInputDocument doc, Object o) {
     if (!(o instanceof Map)) {
       String key = prefix != null ? prefix : VALUE;
       doc.addField(key, o);
@@ -196,77 +185,192 @@ public class MetricUtils {
     }
   }
 
+  /**
+   * Convert selected metrics to maps or to flattened objects.
+   * @param registry source of metrics
+   * @param shouldMatchFilters metrics must match any of these filters
+   * @param mustMatchFilter metrics must match this filter
+   * @param propertyFilter limit what properties of a metric are returned
+   * @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
+   * @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
+   * @param compact use compact representation for counters and gauges.
+   * @param simple use simplified representation for complex metrics - instead of a (name, map)
+   *             only the selected (name "." key, value) pairs will be produced.
+   * @param consumer consumer that accepts produced objects
+   */
   public static void toMaps(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
-                            MetricFilter mustMatchFilter, boolean skipHistograms, boolean skipAggregateValues,
-                            boolean compact,
-                            BiConsumer<String, Object> consumer) {
-    Map<String, Metric> metrics = registry.getMetrics();
-    SortedSet<String> names = registry.getNames();
+                     MetricFilter mustMatchFilter, PropertyFilter propertyFilter,
+                     boolean skipHistograms, boolean skipAggregateValues,
+                     boolean compact, boolean simple,
+                     BiConsumer<String, Object> consumer) {
+    final Map<String, Metric> metrics = registry.getMetrics();
+    final SortedSet<String> names = registry.getNames();
     names.stream()
         .filter(s -> shouldMatchFilters.stream().anyMatch(metricFilter -> metricFilter.matches(s, metrics.get(s))))
         .filter(s -> mustMatchFilter.matches(s, metrics.get(s)))
         .forEach(n -> {
           Metric metric = metrics.get(n);
-          if (metric instanceof Counter) {
-            Counter counter = (Counter) metric;
-            consumer.accept(n, convertCounter(counter, compact));
-          } else if (metric instanceof Gauge) {
-            Gauge gauge = (Gauge) metric;
-            try {
-              consumer.accept(n, convertGauge(gauge, compact));
-            } catch (InternalError ie) {
-              if (n.startsWith("memory.") && ie.getMessage().contains("Memory Pool not found")) {
-                LOG.warn("Error converting gauge '" + n + "', possible JDK bug: SOLR-10362", ie);
-                consumer.accept(n, null);
-              } else {
-                throw ie;
-              }
-            }
-          } else if (metric instanceof Meter) {
-            Meter meter = (Meter) metric;
-            consumer.accept(n, convertMeter(meter));
-          } else if (metric instanceof Timer) {
-            Timer timer = (Timer) metric;
-            consumer.accept(n, convertTimer(timer, skipHistograms));
-          } else if (metric instanceof Histogram) {
-            if (!skipHistograms) {
-              Histogram histogram = (Histogram) metric;
-              consumer.accept(n, convertHistogram(histogram));
-            }
-          } else if (metric instanceof AggregateMetric) {
-            consumer.accept(n, convertAggregateMetric((AggregateMetric)metric, skipAggregateValues));
-          }
+          convertMetric(n, metric, propertyFilter, skipHistograms, skipAggregateValues, compact, simple, consumer);
         });
   }
 
-  static Map<String, Object> convertAggregateMetric(AggregateMetric metric, boolean skipAggregateValues) {
-    Map<String, Object> response = new LinkedHashMap<>();
-    response.put("count", metric.size());
-    response.put(MAX, metric.getMax());
-    response.put(MIN, metric.getMin());
-    response.put(MEAN, metric.getMean());
-    response.put(STDDEV, metric.getStdDev());
-    response.put(SUM, metric.getSum());
-    if (!(metric.isEmpty() || skipAggregateValues)) {
-      Map<String, Object> values = new LinkedHashMap<>();
-      response.put(VALUES, values);
-      metric.getValues().forEach((k, v) -> {
-        Map<String, Object> map = new LinkedHashMap<>();
-        map.put("value", v.value);
-        map.put("updateCount", v.updateCount.get());
-        values.put(k, map);
-      });
+  /**
+   * Convert selected metrics from a registry into a map, with metrics in a compact AND simple format.
+   * @param registry registry
+   * @param names metric names
+   * @return map where keys are metric names (if they were present in the registry) and values are
+   * converted metrics in simplified format.
+   */
+  public static Map<String, Object> convertMetrics(MetricRegistry registry, Collection<String> names) {
+    final Map<String, Object> metrics = new HashMap<>();
+    convertMetrics(registry, names, false, true, true, true, (k, v) -> metrics.put(k, v));
+    return metrics;
+  }
+
+  /**
+   * Convert selected metrics from a registry into maps (when <code>compact==false</code>) or
+   * flattened objects.
+   * @param registry registry
+   * @param names metric names
+   * @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
+   * @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
+   * @param compact use compact representation for counters and gauges.
+   * @param simple use simplified representation for complex metrics - instead of a (name, map)
+   *             only the selected (name "." key, value) pairs will be produced.
+   * @param consumer consumer that accepts produced objects
+   */
+  public static void convertMetrics(MetricRegistry registry, Collection<String> names,
+                                    boolean skipHistograms, boolean skipAggregateValues,
+                                    boolean compact, boolean simple,
+                                    BiConsumer<String, Object> consumer) {
+    final Map<String, Metric> metrics = registry.getMetrics();
+    names.stream()
+        .forEach(n -> {
+          Metric metric = metrics.get(n);
+          convertMetric(n, metric, PropertyFilter.ALL, skipHistograms, skipAggregateValues, compact, simple, consumer);
+        });
+  }
+
+  /**
+   * Convert a single instance of metric into a map or flattened object.
+   * @param n metric name
+   * @param metric metric instance
+   * @param propertyFilter limit what properties of a metric are returned
+   * @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
+   * @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
+   * @param compact use compact representation for counters and gauges.
+   * @param simple use simplified representation for complex metrics - instead of a (name, map)
+   *             only the selected (name "." key, value) pairs will be produced.
+   * @param consumer consumer that accepts produced objects
+   */
+  static void convertMetric(String n, Metric metric, PropertyFilter propertyFilter, boolean skipHistograms, boolean skipAggregateValues,
+                              boolean compact, boolean simple, BiConsumer<String, Object> consumer) {
+    if (metric instanceof Counter) {
+      Counter counter = (Counter) metric;
+      convertCounter(n, counter, propertyFilter, compact, consumer);
+    } else if (metric instanceof Gauge) {
+      Gauge gauge = (Gauge) metric;
+      try {
+        convertGauge(n, gauge, propertyFilter, simple, compact, consumer);
+      } catch (InternalError ie) {
+        if (n.startsWith("memory.") && ie.getMessage().contains("Memory Pool not found")) {
+          LOG.warn("Error converting gauge '" + n + "', possible JDK bug: SOLR-10362", ie);
+          consumer.accept(n, null);
+        } else {
+          throw ie;
+        }
+      }
+    } else if (metric instanceof Meter) {
+      Meter meter = (Meter) metric;
+      convertMeter(n, meter, propertyFilter, simple, consumer);
+    } else if (metric instanceof Timer) {
+      Timer timer = (Timer) metric;
+      convertTimer(n, timer, propertyFilter, skipHistograms, simple, consumer);
+    } else if (metric instanceof Histogram) {
+      if (!skipHistograms) {
+        Histogram histogram = (Histogram) metric;
+        convertHistogram(n, histogram, propertyFilter, simple, consumer);
+      }
+    } else if (metric instanceof AggregateMetric) {
+      convertAggregateMetric(n, (AggregateMetric)metric, propertyFilter, skipAggregateValues, simple, consumer);
+    }
+  }
+
+  /**
+   * Convert an instance of {@link AggregateMetric}.
+   * @param name metric name
+   * @param metric an instance of {@link AggregateMetric}
+   * @param propertyFilter limit what properties of a metric are returned
+   * @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
+   * @param simple use simplified representation for complex metrics - instead of a (name, map)
+   *             only the selected (name "." key, value) pairs will be produced.
+   * @param consumer consumer that accepts produced objects
+   */
+  static void convertAggregateMetric(String name, AggregateMetric metric,
+      PropertyFilter propertyFilter,
+      boolean skipAggregateValues, boolean simple, BiConsumer<String, Object> consumer) {
+    if (simple) {
+      if (propertyFilter.accept(MEAN)) {
+        consumer.accept(name + "." + MEAN, metric.getMean());
+      }
+    } else {
+      Map<String, Object> response = new LinkedHashMap<>();
+      BiConsumer<String, Object> filter = (k, v) -> {
+        if (propertyFilter.accept(k)) {
+          response.put(k, v);
+        }
+      };
+      filter.accept("count", metric.size());
+      filter.accept(MAX, metric.getMax());
+      filter.accept(MIN, metric.getMin());
+      filter.accept(MEAN, metric.getMean());
+      filter.accept(STDDEV, metric.getStdDev());
+      filter.accept(SUM, metric.getSum());
+      if (!(metric.isEmpty() || skipAggregateValues)) {
+        Map<String, Object> values = new LinkedHashMap<>();
+        response.put(VALUES, values);
+        metric.getValues().forEach((k, v) -> {
+          Map<String, Object> map = new LinkedHashMap<>();
+          map.put("value", v.value);
+          map.put("updateCount", v.updateCount.get());
+          values.put(k, map);
+        });
+      }
+      if (!response.isEmpty()) {
+        consumer.accept(name, response);
+      }
     }
-    return response;
   }
 
-  static Map<String, Object> convertHistogram(Histogram histogram) {
-    Map<String, Object> response = new LinkedHashMap<>();
+  /**
+   * Convert an instance of {@link Histogram}. NOTE: it is assumed that the histogram contains
+   * non-time-based values that don't require unit conversion.
+   * @param name metric name
+   * @param histogram an instance of {@link Histogram}
+   * @param propertyFilter limit what properties of a metric are returned
+   * @param simple use simplified representation for complex metrics - instead of a (name, map)
+   *             only the selected (name "." key, value) pairs will be produced.
+   * @param consumer consumer that accepts produced objects
+   */
+  static void convertHistogram(String name, Histogram histogram, PropertyFilter propertyFilter,
+                                              boolean simple, BiConsumer<String, Object> consumer) {
     Snapshot snapshot = histogram.getSnapshot();
-    response.put("count", histogram.getCount());
-    // non-time based values
-    addSnapshot(response, snapshot, false);
-    return response;
+    if (simple) {
+      if (propertyFilter.accept(MEAN)) {
+        consumer.accept(name + "." + MEAN, snapshot.getMean());
+      }
+    } else {
+      Map<String, Object> response = new LinkedHashMap<>();
+      String prop = "count";
+      if (propertyFilter.accept(prop)) {
+        response.put(prop, histogram.getCount());
+      }
+      // non-time based values
+      addSnapshot(response, snapshot, propertyFilter, false);
+      if (!response.isEmpty()) {
+        consumer.accept(name, response);
+      }
+    }
   }
 
   // optionally convert ns to ms
@@ -279,66 +383,258 @@ public class MetricUtils {
   }
 
   // some snapshots represent time in ns, other snapshots represent raw values (eg. chunk size)
-  static void addSnapshot(Map<String, Object> response, Snapshot snapshot, boolean ms) {
-    response.put((ms ? MIN_MS: MIN), nsToMs(ms, snapshot.getMin()));
-    response.put((ms ? MAX_MS: MAX), nsToMs(ms, snapshot.getMax()));
-    response.put((ms ? MEAN_MS : MEAN), nsToMs(ms, snapshot.getMean()));
-    response.put((ms ? MEDIAN_MS: MEDIAN), nsToMs(ms, snapshot.getMedian()));
-    response.put((ms ? STDDEV_MS: STDDEV), nsToMs(ms, snapshot.getStdDev()));
-    response.put((ms ? P75_MS: P75), nsToMs(ms, snapshot.get75thPercentile()));
-    response.put((ms ? P95_MS: P95), nsToMs(ms, snapshot.get95thPercentile()));
-    response.put((ms ? P99_MS: P99), nsToMs(ms, snapshot.get99thPercentile()));
-    response.put((ms ? P999_MS: P999), nsToMs(ms, snapshot.get999thPercentile()));
+  static void addSnapshot(Map<String, Object> response, Snapshot snapshot, PropertyFilter propertyFilter, boolean ms) {
+    BiConsumer<String, Object> filter = (k, v) -> {
+      if (propertyFilter.accept(k)) {
+        response.put(k, v);
+      }
+    };
+    filter.accept((ms ? MIN_MS: MIN), nsToMs(ms, snapshot.getMin()));
+    filter.accept((ms ? MAX_MS: MAX), nsToMs(ms, snapshot.getMax()));
+    filter.accept((ms ? MEAN_MS : MEAN), nsToMs(ms, snapshot.getMean()));
+    filter.accept((ms ? MEDIAN_MS: MEDIAN), nsToMs(ms, snapshot.getMedian()));
+    filter.accept((ms ? STDDEV_MS: STDDEV), nsToMs(ms, snapshot.getStdDev()));
+    filter.accept((ms ? P75_MS: P75), nsToMs(ms, snapshot.get75thPercentile()));
+    filter.accept((ms ? P95_MS: P95), nsToMs(ms, snapshot.get95thPercentile()));
+    filter.accept((ms ? P99_MS: P99), nsToMs(ms, snapshot.get99thPercentile()));
+    filter.accept((ms ? P999_MS: P999), nsToMs(ms, snapshot.get999thPercentile()));
   }
 
-  static Map<String,Object> convertTimer(Timer timer, boolean skipHistograms) {
-    Map<String, Object> response = new LinkedHashMap<>();
-    response.put("count", timer.getCount());
-    response.put("meanRate", timer.getMeanRate());
-    response.put("1minRate", timer.getOneMinuteRate());
-    response.put("5minRate", timer.getFiveMinuteRate());
-    response.put("15minRate", timer.getFifteenMinuteRate());
-    if (!skipHistograms) {
-      // time-based values in nanoseconds
-      addSnapshot(response, timer.getSnapshot(), true);
+  /**
+   * Convert a {@link Timer} to a map.
+   * @param name metric name
+   * @param timer timer instance
+   * @param propertyFilter limit what properties of a metric are returned
+   * @param skipHistograms if true then discard the histogram part of the timer.
+   * @param simple use simplified representation for complex metrics - instead of a (name, map)
+   *             only the selected (name "." key, value) pairs will be produced.
+   * @param consumer consumer that accepts produced objects
+   */
+  public static void convertTimer(String name, Timer timer, PropertyFilter propertyFilter, boolean skipHistograms,
+                                                boolean simple, BiConsumer<String, Object> consumer) {
+    if (simple) {
+      String prop = "meanRate";
+      if (propertyFilter.accept(prop)) {
+        consumer.accept(name + "." + prop, timer.getMeanRate());
+      }
+    } else {
+      Map<String, Object> response = new LinkedHashMap<>();
+      BiConsumer<String,Object> filter = (k, v) -> {
+        if (propertyFilter.accept(k)) {
+          response.put(k, v);
+        }
+      };
+      filter.accept("count", timer.getCount());
+      filter.accept("meanRate", timer.getMeanRate());
+      filter.accept("1minRate", timer.getOneMinuteRate());
+      filter.accept("5minRate", timer.getFiveMinuteRate());
+      filter.accept("15minRate", timer.getFifteenMinuteRate());
+      if (!skipHistograms) {
+        // time-based values in nanoseconds
+        addSnapshot(response, timer.getSnapshot(), propertyFilter, true);
+      }
+      if (!response.isEmpty()) {
+        consumer.accept(name, response);
+      }
     }
-    return response;
   }
 
-  static Map<String, Object> convertMeter(Meter meter) {
-    Map<String, Object> response = new LinkedHashMap<>();
-    response.put("count", meter.getCount());
-    response.put("meanRate", meter.getMeanRate());
-    response.put("1minRate", meter.getOneMinuteRate());
-    response.put("5minRate", meter.getFiveMinuteRate());
-    response.put("15minRate", meter.getFifteenMinuteRate());
-    return response;
+  /**
+   * Convert a {@link Meter} to a map.
+   * @param name metric name
+   * @param meter meter instance
+   * @param propertyFilter limit what properties of a metric are returned
+   * @param simple use simplified representation for complex metrics - instead of a (name, map)
+   *             only the selected (name "." key, value) pairs will be produced.
+   * @param consumer consumer that accepts produced objects
+   */
+  static void convertMeter(String name, Meter meter, PropertyFilter propertyFilter, boolean simple, BiConsumer<String, Object> consumer) {
+    if (simple) {
+      if (propertyFilter.accept("count")) {
+        consumer.accept(name + ".count", meter.getCount());
+      }
+    } else {
+      Map<String, Object> response = new LinkedHashMap<>();
+      BiConsumer<String, Object> filter = (k, v) -> {
+        if (propertyFilter.accept(k)) {
+          response.put(k, v);
+        }
+      };
+      filter.accept("count", meter.getCount());
+      filter.accept("meanRate", meter.getMeanRate());
+      filter.accept("1minRate", meter.getOneMinuteRate());
+      filter.accept("5minRate", meter.getFiveMinuteRate());
+      filter.accept("15minRate", meter.getFifteenMinuteRate());
+      if (!response.isEmpty()) {
+        consumer.accept(name, response);
+      }
+    }
   }
 
-  static Object convertGauge(Gauge gauge, boolean compact) {
-    if (compact) {
-      return gauge.getValue();
+  /**
+   * Convert a {@link Gauge}.
+   * @param name metric name
+   * @param gauge gauge instance
+   * @param propertyFilter limit what properties of a metric are returned
+   * @param simple use simplified representation for complex metrics - instead of a (name, map)
+   *             only the selected (name "." key, value) pairs will be produced.
+   * @param compact if true then only return {@link Gauge#getValue()}. If false
+   *                then return a map with a "value" field.
+   * @param consumer consumer that accepts produced objects
+   */
+  static void convertGauge(String name, Gauge gauge, PropertyFilter propertyFilter, boolean simple, boolean compact,
+                             BiConsumer<String, Object> consumer) {
+    if (compact || simple) {
+      Object o = gauge.getValue();
+      if (o instanceof Map) {
+        if (simple) {
+          for (Map.Entry<?, ?> entry : ((Map<?, ?>)o).entrySet()) {
+            String prop = entry.getKey().toString();
+            if (propertyFilter.accept(prop)) {
+              consumer.accept(name + "." + prop, entry.getValue());
+            }
+          }
+        } else {
+          Map<String, Object> val = new HashMap<>();
+          for (Map.Entry<?, ?> entry : ((Map<?, ?>)o).entrySet()) {
+            String prop = entry.getKey().toString();
+            if (propertyFilter.accept(prop)) {
+              val.put(prop, entry.getValue());
+            }
+          }
+          if (!val.isEmpty()) {
+            consumer.accept(name, val);
+          }
+        }
+      } else {
+        consumer.accept(name, o);
+      }
     } else {
+      Object o = gauge.getValue();
       Map<String, Object> response = new LinkedHashMap<>();
-      response.put("value", gauge.getValue());
-      return response;
+      if (o instanceof Map) {
+        for (Map.Entry<?, ?> entry : ((Map<?, ?>)o).entrySet()) {
+          String prop = entry.getKey().toString();
+          if (propertyFilter.accept(prop)) {
+            response.put(prop, entry.getValue());
+          }
+        }
+        if (!response.isEmpty()) {
+          consumer.accept(name, Collections.singletonMap("value", response));
+        }
+      } else {
+        if (propertyFilter.accept("value")) {
+          response.put("value", o);
+          consumer.accept(name, response);
+        }
+      }
     }
   }
 
-  static Object convertCounter(Counter counter, boolean compact) {
+  /**
+   * Convert a {@link Counter}
+   * @param counter counter instance
+   * @param propertyFilter limit what properties of a metric are returned
+   * @param compact if true then only return {@link Counter#getCount()}. If false
+   *                then return a map with a "count" field.
+   */
+  static void convertCounter(String name, Counter counter, PropertyFilter propertyFilter, boolean compact, BiConsumer<String, Object> consumer) {
     if (compact) {
-      return counter.getCount();
+      consumer.accept(name, counter.getCount());
     } else {
-      Map<String, Object> response = new LinkedHashMap<>();
-      response.put("count", counter.getCount());
-      return response;
+      if (propertyFilter.accept("count")) {
+        Map<String, Object> response = new LinkedHashMap<>();
+        response.put("count", counter.getCount());
+        consumer.accept(name, response);
+      }
     }
   }
 
   /**
    * Returns an instrumented wrapper over the given executor service.
    */
-  public static ExecutorService instrumentedExecutorService(ExecutorService delegate, MetricRegistry metricRegistry, String scope)  {
+  public static ExecutorService instrumentedExecutorService(ExecutorService delegate, SolrInfoBean info, MetricRegistry metricRegistry, String scope)  {
+    if (info != null && info.getMetricNames() != null) {
+      info.getMetricNames().add(MetricRegistry.name(scope, "submitted"));
+      info.getMetricNames().add(MetricRegistry.name(scope, "running"));
+      info.getMetricNames().add(MetricRegistry.name(scope, "completed"));
+      info.getMetricNames().add(MetricRegistry.name(scope, "duration"));
+    }
     return new InstrumentedExecutorService(delegate, metricRegistry, scope);
   }
+
+  /**
+   * Creates a set of metrics (gauges) that correspond to available bean properties for the provided MXBean.
+   * @param obj an instance of MXBean
+   * @param intf MXBean interface, one of {@link PlatformManagedObject}-s
+   * @param prefix optional prefix for metric names
+   * @param consumer consumer for created names and metrics
+   * @param <T> formal type
+   */
+  public static <T extends PlatformManagedObject> void addMXBeanMetrics(T obj, Class<? extends T> intf,
+      String prefix, BiConsumer<String, Metric> consumer) {
+    if (intf.isInstance(obj)) {
+      BeanInfo beanInfo;
+      try {
+        beanInfo = Introspector.getBeanInfo(intf, intf.getSuperclass(), Introspector.IGNORE_ALL_BEANINFO);
+      } catch (IntrospectionException e) {
+        LOG.warn("Unable to fetch properties of MXBean " + obj.getClass().getName(), e);
+        return;
+      }
+      for (final PropertyDescriptor desc : beanInfo.getPropertyDescriptors()) {
+        final String name = desc.getName();
+        // test if it works at all
+        try {
+          desc.getReadMethod().invoke(obj);
+          // worked - consume it
+          final Gauge<?> gauge = () -> {
+            try {
+              return desc.getReadMethod().invoke(obj);
+            } catch (InvocationTargetException ite) {
+              // ignore (some properties throw UOE)
+              return null;
+            } catch (IllegalAccessException e) {
+              return null;
+            }
+          };
+          String metricName = MetricRegistry.name(prefix, name);
+          consumer.accept(metricName, gauge);
+        } catch (Exception e) {
+          // didn't work, skip it...
+        }
+      }
+    }
+  }
+
+  /**
+   * These are well-known implementations of {@link java.lang.management.OperatingSystemMXBean}.
+   * Some of them provide additional useful properties beyond those declared by the interface.
+   */
+  public static String[] OS_MXBEAN_CLASSES = new String[] {
+      OperatingSystemMXBean.class.getName(),
+      "com.sun.management.OperatingSystemMXBean",
+      "com.sun.management.UnixOperatingSystemMXBean",
+      "com.ibm.lang.management.OperatingSystemMXBean"
+  };
+
+  /**
+   * Creates a set of metrics (gauges) that correspond to available bean properties for the provided MXBean.
+   * @param obj an instance of MXBean
+   * @param interfaces interfaces that it may implement. Each interface will be tried in turn, and only
+   *                   if it exists and if it contains unique properties then they will be added as metrics.
+   * @param prefix optional prefix for metric names
+   * @param consumer consumer for created names and metrics
+   * @param <T> formal type
+   */
+  public static <T extends PlatformManagedObject> void addMXBeanMetrics(T obj, String[] interfaces,
+      String prefix, BiConsumer<String, Metric> consumer) {
+    for (String clazz : interfaces) {
+      try {
+        final Class<? extends PlatformManagedObject> intf = Class.forName(clazz)
+            .asSubclass(PlatformManagedObject.class);
+        MetricUtils.addMXBeanMetrics(obj, intf, prefix, consumer);
+      } catch (ClassNotFoundException e) {
+        // ignore
+      }
+    }
+  }
 }
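
For illustration, a minimal sketch of how the consumer-based converters above are driven
(same-package code, since most converters are package-private; PropertyFilter is assumed to
be the single-method accept(String) filter interface used throughout this class, and Meter
comes from the Dropwizard Metrics library):

    import com.codahale.metrics.Meter;

    Meter meter = new Meter();
    meter.mark(42);
    // Full representation: one (name, map) pair with count/meanRate/1minRate/etc.
    MetricUtils.convertMeter("QUERY./select.requests", meter, name -> true, false,
        (k, v) -> System.out.println(k + " -> " + v));
    // Simple representation: only ("QUERY./select.requests.count", 42) is produced,
    // and only if the property filter accepts "count".
    MetricUtils.convertMeter("QUERY./select.requests", meter, name -> true, true,
        (k, v) -> System.out.println(k + " = " + v));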

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/resources/apispec/cluster.aliases.json
----------------------------------------------------------------------
diff --git a/solr/core/src/resources/apispec/cluster.aliases.json b/solr/core/src/resources/apispec/cluster.aliases.json
new file mode 100644
index 0000000..9cffb71
--- /dev/null
+++ b/solr/core/src/resources/apispec/cluster.aliases.json
@@ -0,0 +1,12 @@
+{
+  "documentation": "https://cwiki.apache.org/confluence/display/solr/Collections+API",
+  "description": "Provides list of collection alises.",
+  "methods": [
+    "GET"
+  ],
+  "url": {
+    "paths": [
+      "/cluster/aliases"
+    ]
+  }
+}
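
A usage sketch for the endpoint this spec describes (the node address and the v2 API root
"/api" are assumptions, not part of the spec file):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ClusterAliasesDemo {
      public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:8983/api/cluster/aliases"); // assumed local node
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try (BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
          in.lines().forEach(System.out::println); // JSON body listing collection aliases
        }
      }
    }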

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test-files/solr/collection1/conf/schema-point.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-point.xml b/solr/core/src/test-files/solr/collection1/conf/schema-point.xml
index ed169a1..ae6a11e 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema-point.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema-point.xml
@@ -51,37 +51,74 @@
    <dynamicField name="*_p_i_dv"  type="pint"    indexed="true"  stored="true" docValues="true"/>
    <dynamicField name="*_p_i_mv"  type="pint"    indexed="true"  stored="true" multiValued="true"/>
    <dynamicField name="*_p_i_mv_dv"  type="pint"    indexed="true"  stored="true" docValues="true" multiValued="true"/>
+   <dynamicField name="*_p_i_ni"  type="pint"    indexed="false"  stored="true" docValues="false"/>
    <dynamicField name="*_p_i_ni_dv"  type="pint"    indexed="false"  stored="true" docValues="true"/>
+   <dynamicField name="*_p_i_ni_dv_ns"  type="pint"    indexed="false"  stored="false" docValues="true" useDocValuesAsStored="false"/>
+   <dynamicField name="*_p_i_ni_dv_ns_mv"  type="pint"    indexed="false"  stored="false" docValues="true" useDocValuesAsStored="false" multiValued="true"/>
+   <dynamicField name="*_p_i_ni_mv"  type="pint"    indexed="false"  stored="true" docValues="false" multiValued="true"/>
    <dynamicField name="*_p_i_ni_mv_dv"  type="pint"    indexed="false"  stored="true" docValues="true" multiValued="true"/>
+   <dynamicField name="*_p_i_ni_ns"  type="pint"    indexed="false"  stored="false" docValues="false" />
+   <dynamicField name="*_p_i_ni_ns_mv"  type="pint"    indexed="false"  stored="false" docValues="false" multiValued="true"/>
    
    <dynamicField name="*_p_l"  type="plong"    indexed="true"  stored="true"/>
    <dynamicField name="*_p_l_dv"  type="plong"    indexed="true"  stored="true" docValues="true"/>
    <dynamicField name="*_p_l_mv"  type="plong"    indexed="true"  stored="true" multiValued="true"/>
    <dynamicField name="*_p_l_mv_dv"  type="plong"    indexed="true"  stored="true" docValues="true" multiValued="true"/>
+   <dynamicField name="*_p_l_ni"  type="plong"    indexed="false"  stored="true" docValues="false"/>
    <dynamicField name="*_p_l_ni_dv"  type="plong"    indexed="false"  stored="true" docValues="true"/>
+   <dynamicField name="*_p_l_ni_dv_ns"  type="plong"    indexed="false"  stored="false" docValues="true" useDocValuesAsStored="false"/>
+   <dynamicField name="*_p_l_ni_dv_ns_mv"  type="plong"    indexed="false"  stored="false" docValues="true" useDocValuesAsStored="false" multiValued="true"/>
+   <dynamicField name="*_p_l_ni_mv"  type="plong"    indexed="false"  stored="true" docValues="false" multiValued="true"/>
    <dynamicField name="*_p_l_ni_mv_dv"  type="plong"    indexed="false"  stored="true" docValues="true" multiValued="true"/>
+   <dynamicField name="*_p_l_ni_ns"  type="plong"    indexed="false"  stored="false" docValues="false" />
+   <dynamicField name="*_p_l_ni_ns_mv"  type="plong"    indexed="false"  stored="false" docValues="false" multiValued="true"/>
    
    <dynamicField name="*_p_d"  type="pdouble"    indexed="true"  stored="true"/>
    <dynamicField name="*_p_d_dv"  type="pdouble"    indexed="true"  stored="true" docValues="true"/>
    <dynamicField name="*_p_d_mv"  type="pdouble"    indexed="true"  stored="true" multiValued="true"/>
    <dynamicField name="*_p_d_mv_dv"  type="pdouble"    indexed="true"  stored="true" docValues="true" multiValued="true"/>
+   <dynamicField name="*_p_d_ni"  type="pdouble"    indexed="false"  stored="true" docValues="false"/>
    <dynamicField name="*_p_d_ni_dv"  type="pdouble"    indexed="false"  stored="true" docValues="true"/>
+   <dynamicField name="*_p_d_ni_dv_ns"  type="pdouble"    indexed="false"  stored="false" docValues="true" useDocValuesAsStored="false"/>
+   <dynamicField name="*_p_d_ni_dv_ns_mv"  type="pdouble"    indexed="false"  stored="false" docValues="true" useDocValuesAsStored="false" multiValued="true"/>
+   <dynamicField name="*_p_d_ni_mv"  type="pdouble"    indexed="false"  stored="true" docValues="false" multiValued="true"/>
    <dynamicField name="*_p_d_ni_mv_dv"  type="pdouble"    indexed="false"  stored="true" docValues="true" multiValued="true"/>
+   <dynamicField name="*_p_d_ni_ns"  type="pdouble"    indexed="false"  stored="false" docValues="false"/>
+   <dynamicField name="*_p_d_ni_ns_mv"  type="pdouble"    indexed="false"  stored="false" docValues="false" multiValued="true"/>
    
    <dynamicField name="*_p_f"  type="pfloat"    indexed="true"  stored="true"/>
    <dynamicField name="*_p_f_dv"  type="pfloat"    indexed="true"  stored="true" docValues="true"/>
    <dynamicField name="*_p_f_mv"  type="pfloat"    indexed="true"  stored="true" multiValued="true"/>
    <dynamicField name="*_p_f_mv_dv"  type="pfloat"    indexed="true"  stored="true" docValues="true" multiValued="true"/>
+   <dynamicField name="*_p_f_ni"  type="pfloat"    indexed="false"  stored="true" docValues="false"/>
    <dynamicField name="*_p_f_ni_dv"  type="pfloat"    indexed="false"  stored="true" docValues="true"/>
+   <dynamicField name="*_p_f_ni_dv_ns"  type="pfloat"    indexed="false"  stored="false" docValues="true" useDocValuesAsStored="false"/>
+   <dynamicField name="*_p_f_ni_dv_ns_mv"  type="pfloat"    indexed="false"  stored="false" docValues="true" useDocValuesAsStored="false" multiValued="true"/>
+   <dynamicField name="*_p_f_ni_mv"  type="pfloat"    indexed="false"  stored="true" docValues="false" multiValued="true"/>
    <dynamicField name="*_p_f_ni_mv_dv"  type="pfloat"    indexed="false"  stored="true" docValues="true" multiValued="true"/>
+   <dynamicField name="*_p_f_ni_ns"  type="pfloat"    indexed="false"  stored="false" docValues="false"/>
+   <dynamicField name="*_p_f_ni_ns_mv"  type="pfloat"    indexed="false"  stored="false" docValues="false" multiValued="true"/>
 
    <dynamicField name="*_p_dt"  type="pdate"    indexed="true"  stored="true"/>
    <dynamicField name="*_p_dt_dv"  type="pdate"    indexed="true"  stored="true" docValues="true"/>
    <dynamicField name="*_p_dt_mv"  type="pdate"    indexed="true"  stored="true" multiValued="true"/>
    <dynamicField name="*_p_dt_mv_dv"  type="pdate"    indexed="true"  stored="true" docValues="true" multiValued="true"/>
+   <dynamicField name="*_p_dt_ni"  type="pdate"    indexed="false"  stored="true" docValues="false"/>
    <dynamicField name="*_p_dt_ni_dv"  type="pdate"    indexed="false"  stored="true" docValues="true"/>
+   <dynamicField name="*_p_dt_ni_dv_ns"  type="pdate"    indexed="false"  stored="false" docValues="true" useDocValuesAsStored="false"/>
+   <dynamicField name="*_p_dt_ni_dv_ns_mv"  type="pdate"    indexed="false"  stored="false" docValues="true" useDocValuesAsStored="false" multiValued="true"/>
+   <dynamicField name="*_p_dt_ni_mv"  type="pdate"    indexed="false"  stored="true" docValues="false" multiValued="true"/>
    <dynamicField name="*_p_dt_ni_mv_dv"  type="pdate"    indexed="false"  stored="true" docValues="true" multiValued="true"/>
-   
+   <dynamicField name="*_p_dt_ni_ns"  type="pdate"    indexed="false"  stored="false" docValues="false"/>
+   <dynamicField name="*_p_dt_ni_ns_mv"  type="pdate"    indexed="false"  stored="false" docValues="false" multiValued="true"/>
+
+
+   <!-- NOTE: https://issues.apache.org/jira/browse/SOLR-10438
+        
+        NOTE: because schema version=1.6, *all* DV fields default to useDocValuesAsStored="true"
+        
+        NOTE: we need to audit if this is breaking any assumptions elsewhere in the test code
+   -->
    <!-- return DV fields as  -->
    <dynamicField name="*_p_i_dv_ns"  type="pint"    indexed="true"  stored="false" docValues="true" useDocValuesAsStored="true"/>
    <dynamicField name="*_p_l_dv_ns"  type="plong"    indexed="true"  stored="false" docValues="true" useDocValuesAsStored="true"/>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test-files/solr/collection1/conf/schema-rest.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-rest.xml b/solr/core/src/test-files/solr/collection1/conf/schema-rest.xml
index aa4c21d..85c822a 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema-rest.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema-rest.xml
@@ -494,7 +494,7 @@
   <fieldType name="location" class="solr.LatLonType" subFieldSuffix="_coordinate"/>
 
   <!-- Field type where english stopwords are managed by the REST API -->
-  <fieldType name="managed_en" class="solr.TextField">
+  <fieldType name="managed_en" class="solr.TextField" autoGeneratePhraseQueries="false">
     <analyzer>
       <tokenizer class="solr.StandardTokenizerFactory"/>
       <filter class="solr.ManagedStopFilterFactory" managed="english"/>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test-files/solr/collection1/conf/schema12.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema12.xml b/solr/core/src/test-files/solr/collection1/conf/schema12.xml
index 7ea770b..5a85c1f 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema12.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema12.xml
@@ -467,7 +467,14 @@
 
   <fieldType name="currency" class="solr.CurrencyField" currencyConfig="currency.xml" multiValued="false"/>
 
-
+  <fieldType name="shingle23" class="solr.TextField" enableGraphQueries="false" multiValued="true">
+    <analyzer>
+      <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+      <filter class="solr.ShingleFilterFactory" minShingleSize="2" maxShingleSize="3" 
+              tokenSeparator="_" outputUnigrams="false"/>
+    </analyzer>
+  </fieldType>
+  
   <field name="id" type="string" indexed="true" stored="true" multiValued="false" required="true"/>
   <field name="signatureField" type="string" indexed="true" stored="false"/>
   <field name="uuid" type="uuid" stored="true"/>
@@ -568,6 +575,8 @@
   <field name="uniq2" type="string" indexed="true" stored="true" multiValued="true"/>
   <field name="uniq3" type="string" indexed="true" stored="true"/>
   <field name="nouniq" type="string" indexed="true" stored="true" multiValued="true"/>
+  
+  <field name="shingle23" type="shingle23" indexed="true" stored="true"/>
 
 
   <!--
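
The shingle23 type above chains a whitespace tokenizer into a shingle filter; a minimal
same-idea sketch with Lucene's analysis API (the demo input and class name are assumptions):

    import java.io.StringReader;
    import org.apache.lucene.analysis.core.WhitespaceTokenizer;
    import org.apache.lucene.analysis.shingle.ShingleFilter;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    public class ShingleDemo {
      public static void main(String[] args) throws Exception {
        WhitespaceTokenizer tok = new WhitespaceTokenizer();
        tok.setReader(new StringReader("the quick brown"));
        // minShingleSize=2, maxShingleSize=3, as in the field type above
        ShingleFilter shingles = new ShingleFilter(tok, 2, 3);
        shingles.setTokenSeparator("_");
        shingles.setOutputUnigrams(false);
        CharTermAttribute term = shingles.addAttribute(CharTermAttribute.class);
        shingles.reset();
        while (shingles.incrementToken()) {
          System.out.println(term); // the_quick, the_quick_brown, quick_brown
        }
        shingles.end();
        shingles.close();
      }
    }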

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test-files/solr/collection1/conf/solrconfig-collapseqparser.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-collapseqparser.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-collapseqparser.xml
index ff19baa..3ac0b50 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-collapseqparser.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-collapseqparser.xml
@@ -239,75 +239,14 @@ based HashBitset. -->
 
   <searchComponent name="spellcheck" class="org.apache.solr.handler.component.SpellCheckComponent">
     <!-- This is slightly different from the field value so we can test dealing with token offset changes -->
-    <str name="queryAnalyzerFieldType">lowerpunctfilt</str>
+    <str name="queryAnalyzerFieldType">a_s</str>
 
-    <lst name="spellchecker">
-      <str name="name">default</str>
-      <str name="field">lowerfilt</str>
-      <str name="spellcheckIndexDir">spellchecker1</str>
-      <str name="buildOnCommit">false</str>
-    </lst>
-    <lst name="spellchecker">
+   <lst name="spellchecker">
       <str name="name">direct</str>
       <str name="classname">DirectSolrSpellChecker</str>
-      <str name="field">lowerfilt</str>
+      <str name="field">a_s</str>
       <int name="minQueryLength">3</int>
-    </lst>
-    <lst name="spellchecker">
-      <str name="name">wordbreak</str>
-      <str name="classname">solr.WordBreakSolrSpellChecker</str>
-      <str name="field">lowerfilt</str>
-      <str name="combineWords">true</str>
-      <str name="breakWords">true</str>
-      <int name="maxChanges">10</int>
-    </lst>
-    <lst name="spellchecker">
-      <str name="name">multipleFields</str>
-      <str name="field">lowerfilt1and2</str>
-      <str name="spellcheckIndexDir">spellcheckerMultipleFields</str>
-      <str name="buildOnCommit">false</str>
-    </lst>
-    <!-- Example of using different distance measure -->
-    <lst name="spellchecker">
-      <str name="name">jarowinkler</str>
-      <str name="field">lowerfilt</str>
-      <!-- Use a different Distance Measure -->
-      <str name="distanceMeasure">org.apache.lucene.search.spell.JaroWinklerDistance</str>
-      <str name="spellcheckIndexDir">spellchecker2</str>
-
-    </lst>
-    <lst name="spellchecker">
-      <str name="classname">solr.FileBasedSpellChecker</str>
-      <str name="name">external</str>
-      <str name="sourceLocation">spellings.txt</str>
-      <str name="characterEncoding">UTF-8</str>
-      <str name="spellcheckIndexDir">spellchecker3</str>
-    </lst>
-    <!-- Comparator -->
-    <lst name="spellchecker">
-      <str name="name">freq</str>
-      <str name="field">lowerfilt</str>
-      <str name="spellcheckIndexDir">spellcheckerFreq</str>
-      <!-- comparatorClass be one of:
-        1. score (default)
-        2. freq (Frequency first, then score)
-        3. A fully qualified class name
-       -->
-      <str name="comparatorClass">freq</str>
-      <str name="buildOnCommit">false</str>
-    </lst>
-    <lst name="spellchecker">
-      <str name="name">fqcn</str>
-      <str name="field">lowerfilt</str>
-      <str name="spellcheckIndexDir">spellcheckerFQCN</str>
-      <str name="comparatorClass">org.apache.solr.spelling.SampleComparator</str>
-      <str name="buildOnCommit">false</str>
-    </lst>
-    <lst name="spellchecker">
-      <str name="name">perDict</str>
-      <str name="classname">org.apache.solr.handler.component.DummyCustomParamSpellChecker</str>
-      <str name="field">lowerfilt</str>
-    </lst>
+    </lst>    
   </searchComponent>
 
   <searchComponent name="termsComp" class="org.apache.solr.handler.component.TermsComponent"/>
@@ -323,19 +262,6 @@ based HashBitset. -->
    -->
   <queryConverter name="queryConverter" class="org.apache.solr.spelling.SpellingQueryConverter"/>
 
-  <requestHandler name="spellCheckCompRH" class="org.apache.solr.handler.component.SearchHandler">
-    <lst name="defaults">
-      <!-- omp = Only More Popular -->
-      <str name="spellcheck.onlyMorePopular">false</str>
-      <!-- exr = Extended Results -->
-      <str name="spellcheck.extendedResults">false</str>
-      <!--  The number of suggestions to return -->
-      <str name="spellcheck.count">1</str>
-    </lst>
-    <arr name="last-components">
-      <str>spellcheck</str>
-    </arr>
-  </requestHandler>
   <requestHandler name="spellCheckCompRH_Direct" class="org.apache.solr.handler.component.SearchHandler">
     <lst name="defaults">
       <str name="spellcheck.dictionary">direct</str>
@@ -347,35 +273,6 @@ based HashBitset. -->
       <str>spellcheck</str>
     </arr>
   </requestHandler>
-  <requestHandler name="spellCheckWithWordbreak" class="org.apache.solr.handler.component.SearchHandler">
-    <lst name="defaults">
-      <str name="spellcheck.dictionary">default</str>
-      <str name="spellcheck.dictionary">wordbreak</str>
-      <str name="spellcheck.count">20</str>
-    </lst>
-    <arr name="last-components">
-      <str>spellcheck</str>
-    </arr>
-  </requestHandler>
-  <requestHandler name="spellCheckWithWordbreak_Direct" class="org.apache.solr.handler.component.SearchHandler">
-    <lst name="defaults">
-      <str name="spellcheck.dictionary">direct</str>
-      <str name="spellcheck.dictionary">wordbreak</str>
-      <str name="spellcheck.count">20</str>
-    </lst>
-    <arr name="last-components">
-      <str>spellcheck</str>
-    </arr>
-  </requestHandler>
-  <requestHandler name="spellCheckCompRH1" class="org.apache.solr.handler.component.SearchHandler">
-    <lst name="defaults">
-      <str name="defType">dismax</str>
-      <str name="qf">lowerfilt1^1</str>
-    </lst>
-    <arr name="last-components">
-      <str>spellcheck</str>
-    </arr>
-  </requestHandler>
 
   <requestHandler name="mltrh" class="org.apache.solr.handler.component.SearchHandler">
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test-files/solr/solr-hiddensysprops.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/solr-hiddensysprops.xml b/solr/core/src/test-files/solr/solr-hiddensysprops.xml
new file mode 100644
index 0000000..20e5aec
--- /dev/null
+++ b/solr/core/src/test-files/solr/solr-hiddensysprops.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<solr>
+ <metrics>
+  <hiddenSysProps>
+    <str>foo</str>
+    <str>bar</str>
+    <str>baz</str>
+  </hiddenSysProps>
+  <!-- this reporter doesn't specify 'group' or 'registry', it will be instantiated for any group. -->
+  <reporter name="universal" class="org.apache.solr.metrics.reporters.MockMetricReporter">
+    <str name="configurable">configured</str>
+  </reporter>
+ </metrics>
+</solr>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test-files/solr/solr-jmxreporter.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/solr-jmxreporter.xml b/solr/core/src/test-files/solr/solr-jmxreporter.xml
new file mode 100644
index 0000000..bb9d05d
--- /dev/null
+++ b/solr/core/src/test-files/solr/solr-jmxreporter.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<solr>
+  <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
+    <str name="urlScheme">${urlScheme:}</str>
+    <int name="socketTimeout">${socketTimeout:90000}</int>
+    <int name="connTimeout">${connTimeout:15000}</int>
+  </shardHandlerFactory>
+
+  <solrcloud>
+    <str name="host">127.0.0.1</str>
+    <int name="hostPort">${hostPort:8983}</int>
+    <str name="hostContext">${hostContext:solr}</str>
+    <int name="zkClientTimeout">${solr.zkclienttimeout:30000}</int>
+    <bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
+    <int name="leaderVoteWait">${leaderVoteWait:10000}</int>
+    <int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:45000}</int>
+    <int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:340000}</int>
+    <int name="autoReplicaFailoverWaitAfterExpiration">${autoReplicaFailoverWaitAfterExpiration:10000}</int>
+    <int name="autoReplicaFailoverWorkLoopDelay">${autoReplicaFailoverWorkLoopDelay:10000}</int>
+    <int name="autoReplicaFailoverBadNodeExpiration">${autoReplicaFailoverBadNodeExpiration:60000}</int>
+  </solrcloud>
+
+  <metrics>
+    <reporter name="defaultJmx" class="org.apache.solr.metrics.reporters.SolrJmxReporter"/>
+  </metrics>
+</solr>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test-files/solr/solr-solrreporter.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/solr-solrreporter.xml b/solr/core/src/test-files/solr/solr-solrreporter.xml
index db03e42..a66d9d0 100644
--- a/solr/core/src/test-files/solr/solr-solrreporter.xml
+++ b/solr/core/src/test-files/solr/solr-solrreporter.xml
@@ -38,6 +38,10 @@
   </solrcloud>
 
   <metrics>
+    <!-- disable default JMX reporter to avoid conflicts with multiple CoreContainers. -->
+    <reporter name="defaultJmx" class="org.apache.solr.metrics.reporters.SolrJmxReporter">
+      <bool name="enabled">false</bool>
+    </reporter>
     <reporter name="test" group="shard">
       <int name="period">5</int>
       <str name="filter">UPDATE\./update/.*requests</str>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java b/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java
index f4a14db..02ae888 100644
--- a/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java
+++ b/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java
@@ -27,6 +27,8 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Metric;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.LazyDocument;
@@ -38,6 +40,7 @@ import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.handler.RequestHandlerBase;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestHandler;
@@ -122,10 +125,14 @@ public class BasicFunctionalityTest extends SolrTestCaseJ4 {
     assertNotNull(core.getRequestHandler("mock"));
 
     // test stats call
-    NamedList stats = core.getStatistics();
-    assertEquals("collection1", stats.get("coreName"));
-    assertTrue(stats.get("refCount") != null);
-    
+    SolrMetricManager manager = core.getCoreContainer().getMetricManager();
+    String registry = core.getCoreMetricManager().getRegistryName();
+    Map<String, Metric> metrics = manager.registry(registry).getMetrics();
+    assertTrue(metrics.containsKey("CORE.coreName"));
+    assertTrue(metrics.containsKey("CORE.refCount"));
+    Gauge<Number> g = (Gauge<Number>)metrics.get("CORE.refCount");
+    assertTrue(g.getValue().intValue() > 0);
+
     lrf.args.put(CommonParams.VERSION,"2.2");
     assertQ("test query on empty index",
             req("qlkciyopsbgzyvkylsjhchghjrdf")
@@ -378,8 +385,6 @@ public class BasicFunctionalityTest extends SolrTestCaseJ4 {
         @Override
         public String getDescription() { return tmp; }
         @Override
-        public String getSource() { return tmp; }
-        @Override
         public void handleRequestBody
           ( SolrQueryRequest req, SolrQueryResponse rsp ) {
           throw new RuntimeException(tmp);


[15/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/MetricsMap.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/MetricsMap.java b/solr/core/src/java/org/apache/solr/metrics/MetricsMap.java
new file mode 100644
index 0000000..f43c60b
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/MetricsMap.java
@@ -0,0 +1,184 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics;
+
+import javax.management.Attribute;
+import javax.management.AttributeList;
+import javax.management.AttributeNotFoundException;
+import javax.management.DynamicMBean;
+import javax.management.InvalidAttributeValueException;
+import javax.management.MBeanAttributeInfo;
+import javax.management.MBeanException;
+import javax.management.MBeanInfo;
+import javax.management.ReflectionException;
+import javax.management.openmbean.OpenMBeanAttributeInfoSupport;
+import javax.management.openmbean.OpenType;
+import javax.management.openmbean.SimpleType;
+import java.lang.invoke.MethodHandles;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.BiConsumer;
+
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Metric;
+import org.apache.lucene.store.AlreadyClosedException;
+import org.apache.solr.common.SolrException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Dynamically constructed map of metrics, intentionally different from {@link com.codahale.metrics.MetricSet}
+ * where each metric had to be known in advance and registered separately in {@link com.codahale.metrics.MetricRegistry}.
+ * <p>Note: this class extends {@link Gauge} rather than {@link Metric}, awkwardly, because plain {@link Metric}
+ * instances are not supported by {@link com.codahale.metrics.MetricRegistryListener} :(</p>
+ * <p>Note 2: values added to this metric map should belong to the list of types supported by JMX:
+ * {@link javax.management.openmbean.OpenType#ALLOWED_CLASSNAMES_LIST}, otherwise only their toString()
+ * representation will be shown in JConsole.</p>
+ */
+public class MetricsMap implements Gauge<Map<String,Object>>, DynamicMBean {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  // set to true to use cached statistics between getMBeanInfo calls, to work
+  // around over-calling getStatistics on MBeanInfos when iterating over all attributes (SOLR-6586)
+  private final boolean useCachedStatsBetweenGetMBeanInfoCalls = Boolean.getBoolean("useCachedStatsBetweenGetMBeanInfoCalls");
+
+  private BiConsumer<Boolean, Map<String, Object>> initializer;
+  private volatile Map<String,Object> cachedValue;
+
+  public MetricsMap(BiConsumer<Boolean, Map<String,Object>> initializer) {
+    this.initializer = initializer;
+  }
+
+  @Override
+  public Map<String,Object> getValue() {
+    return getValue(true);
+  }
+
+  public Map<String,Object> getValue(boolean detailed) {
+    Map<String,Object> map = new HashMap<>();
+    initializer.accept(detailed, map);
+    return map;
+  }
+
+  public String toString() {
+    return getValue().toString();
+  }
+
+  @Override
+  public Object getAttribute(String attribute) throws AttributeNotFoundException, MBeanException, ReflectionException {
+    Object val;
+    Map<String,Object> stats = null;
+    if (useCachedStatsBetweenGetMBeanInfoCalls) {
+      Map<String,Object> cachedStats = this.cachedValue;
+      if (cachedStats != null) {
+        stats = cachedStats;
+      }
+    }
+    if (stats == null) {
+      stats = getValue(true);
+    }
+    val = stats.get(attribute);
+
+    if (val != null) {
+      // It's a String or one of the simple types; return it directly, since JMX supports such types natively
+      for (String simpleTypeName : SimpleType.ALLOWED_CLASSNAMES_LIST) {
+        if (val.getClass().getName().equals(simpleTypeName)) {
+          return val;
+        }
+      }
+      // It's an arbitrary object that could be something complex and odd; return its toString(),
+      // assuming that is a workable representation of the object
+      return val.toString();
+    }
+    return null;
+  }
+
+  @Override
+  public void setAttribute(Attribute attribute) throws AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException {
+    throw new UnsupportedOperationException("Operation not Supported");
+  }
+
+  @Override
+  public AttributeList getAttributes(String[] attributes) {
+    AttributeList list = new AttributeList();
+    for (String attribute : attributes) {
+      try {
+        list.add(new Attribute(attribute, getAttribute(attribute)));
+      } catch (Exception e) {
+        log.warn("Could not get attribute " + attribute);
+      }
+    }
+    return list;
+  }
+
+  @Override
+  public AttributeList setAttributes(AttributeList attributes) {
+    throw new UnsupportedOperationException("Operation not Supported");
+  }
+
+  @Override
+  public Object invoke(String actionName, Object[] params, String[] signature) throws MBeanException, ReflectionException {
+    throw new UnsupportedOperationException("Operation not Supported");
+  }
+
+  @Override
+  public MBeanInfo getMBeanInfo() {
+    ArrayList<MBeanAttributeInfo> attrInfoList = new ArrayList<>();
+    Map<String,Object> stats = getValue(true);
+    if (useCachedStatsBetweenGetMBeanInfoCalls) {
+      cachedValue = stats;
+    }
+    try {
+      stats.forEach((k, v) -> {
+        Class type = v.getClass();
+        OpenType typeBox = determineType(type);
+        if (type.equals(String.class) || typeBox == null) {
+          attrInfoList.add(new MBeanAttributeInfo(k, String.class.getName(),
+              null, true, false, false));
+        } else {
+          attrInfoList.add(new OpenMBeanAttributeInfoSupport(
+              k, k, typeBox, true, false, false));
+        }
+      });
+    } catch (Exception e) {
+      // don't log issue if the core is closing
+      if (!(SolrException.getRootCause(e) instanceof AlreadyClosedException))
+        log.warn("Could not get attributes of MetricsMap: {}", this, e);
+    }
+    MBeanAttributeInfo[] attrInfoArr = attrInfoList
+        .toArray(new MBeanAttributeInfo[attrInfoList.size()]);
+    return new MBeanInfo(getClass().getName(), "MetricsMap", attrInfoArr, null, null, null);
+  }
+
+  private OpenType determineType(Class type) {
+    try {
+      for (Field field : SimpleType.class.getFields()) {
+        if (field.getType().equals(SimpleType.class)) {
+          SimpleType candidate = (SimpleType) field.get(SimpleType.class);
+          if (candidate.getTypeName().equals(type.getName())) {
+            return candidate;
+          }
+        }
+      }
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+    return null;
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/OperatingSystemMetricSet.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/OperatingSystemMetricSet.java b/solr/core/src/java/org/apache/solr/metrics/OperatingSystemMetricSet.java
index 34ef5d1..21957eb 100644
--- a/solr/core/src/java/org/apache/solr/metrics/OperatingSystemMetricSet.java
+++ b/solr/core/src/java/org/apache/solr/metrics/OperatingSystemMetricSet.java
@@ -16,77 +16,31 @@
  */
 package org.apache.solr.metrics;
 
-import javax.management.JMException;
-import javax.management.MBeanAttributeInfo;
-import javax.management.MBeanInfo;
-import javax.management.MBeanServer;
-import javax.management.ObjectName;
-import java.lang.invoke.MethodHandles;
+import java.lang.management.ManagementFactory;
+import java.lang.management.OperatingSystemMXBean;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Map;
-import java.util.Set;
 
-import com.codahale.metrics.JmxAttributeGauge;
 import com.codahale.metrics.Metric;
 import com.codahale.metrics.MetricSet;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.solr.util.stats.MetricUtils;
 
 /**
  * This is an extended replacement for {@link com.codahale.metrics.jvm.FileDescriptorRatioGauge}
- * - that class uses reflection and doesn't work under Java 9. We can also get much more
- * information about OS environment once we have to go through MBeanServer anyway.
+ * - that class uses reflection and doesn't work under Java 9. This implementation tries to retrieve
+ * bean properties from known implementations of {@link java.lang.management.OperatingSystemMXBean}.
  */
 public class OperatingSystemMetricSet implements MetricSet {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-  /** Metric names - these correspond to known numeric MBean attributes. Depending on the OS and
-   * Java implementation only some of them may be actually present.
-   */
-  public static final String[] METRICS = {
-      "AvailableProcessors",
-      "CommittedVirtualMemorySize",
-      "FreePhysicalMemorySize",
-      "FreeSwapSpaceSize",
-      "MaxFileDescriptorCount",
-      "OpenFileDescriptorCount",
-      "ProcessCpuLoad",
-      "ProcessCpuTime",
-      "SystemLoadAverage",
-      "TotalPhysicalMemorySize",
-      "TotalSwapSpaceSize"
-  };
-
-  private final MBeanServer mBeanServer;
-
-  public OperatingSystemMetricSet(MBeanServer mBeanServer) {
-    this.mBeanServer = mBeanServer;
-  }
 
   @Override
   public Map<String, Metric> getMetrics() {
     final Map<String, Metric> metrics = new HashMap<>();
-
-    try {
-      final ObjectName on = new ObjectName("java.lang:type=OperatingSystem");
-      // verify that it exists
-      MBeanInfo info = mBeanServer.getMBeanInfo(on);
-      // collect valid attributes
-      Set<String> attributes = new HashSet<>();
-      for (MBeanAttributeInfo ai : info.getAttributes()) {
-        attributes.add(ai.getName());
-      }
-      for (String metric : METRICS) {
-        // verify that an attribute exists before attempting to add it
-        if (attributes.contains(metric)) {
-          metrics.put(metric, new JmxAttributeGauge(mBeanServer, on, metric));
-        }
+    OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean();
+    MetricUtils.addMXBeanMetrics(os, MetricUtils.OS_MXBEAN_CLASSES, null, (k, v) -> {
+      if (!metrics.containsKey(k)) {
+        metrics.put(k, v);
       }
-    } catch (JMException ignored) {
-      log.debug("Unable to load OperatingSystem MBean", ignored);
-    }
-
+    });
     return metrics;
   }
 }
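
A sketch of consuming the rewritten metric set (the exact property names are derived from
bean properties and vary by JVM vendor, so the names in the comment are only examples):

    import com.codahale.metrics.Gauge;
    import org.apache.solr.metrics.OperatingSystemMetricSet;

    OperatingSystemMetricSet set = new OperatingSystemMetricSet();
    set.getMetrics().forEach((name, metric) ->
        // e.g. systemLoadAverage, processCpuLoad, openFileDescriptorCount on a HotSpot JVM
        System.out.println(name + " = " + ((Gauge<?>) metric).getValue()));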

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
index 43f3535..8de053c 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
@@ -20,11 +20,12 @@ import java.io.Closeable;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 
+import com.codahale.metrics.MetricRegistry;
 import org.apache.solr.cloud.CloudDescriptor;
 import org.apache.solr.core.NodeConfig;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -54,7 +55,7 @@ public class SolrCoreMetricManager implements Closeable {
   public SolrCoreMetricManager(SolrCore core) {
     this.core = core;
     this.tag = String.valueOf(core.hashCode());
-    this.metricManager = core.getCoreDescriptor().getCoreContainer().getMetricManager();
+    this.metricManager = core.getCoreContainer().getMetricManager();
     initCloudMode();
     registryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName());
     leaderRegistryName = createLeaderRegistryName(cloudMode, collectionName, shardName);
@@ -76,14 +77,14 @@ public class SolrCoreMetricManager implements Closeable {
   }
 
   /**
-   * Load reporters configured globally and specific to {@link org.apache.solr.core.SolrInfoMBean.Group#core}
+   * Load reporters configured globally and specific to {@link org.apache.solr.core.SolrInfoBean.Group#core}
    * group or with a registry name specific to this core.
    */
   public void loadReporters() {
-    NodeConfig nodeConfig = core.getCoreDescriptor().getCoreContainer().getConfig();
+    NodeConfig nodeConfig = core.getCoreContainer().getConfig();
     PluginInfo[] pluginInfos = nodeConfig.getMetricReporterPlugins();
     metricManager.loadReporters(pluginInfos, core.getResourceLoader(), tag,
-        SolrInfoMBean.Group.core, registryName);
+        SolrInfoBean.Group.core, registryName);
     if (cloudMode) {
       metricManager.loadShardReporters(pluginInfos, core);
     }
@@ -127,11 +128,25 @@ public class SolrCoreMetricManager implements Closeable {
   }
 
   /**
+   * Return the registry used by this SolrCore.
+   */
+  public MetricRegistry getRegistry() {
+    if (registryName != null) {
+      return metricManager.registry(registryName);
+    } else {
+      return null;
+    }
+  }
+
+  /**
    * Closes reporters specific to this core.
    */
   @Override
   public void close() throws IOException {
     metricManager.closeReporters(getRegistryName(), tag);
+    if (getLeaderRegistryName() != null) {
+      metricManager.closeReporters(getLeaderRegistryName(), tag);
+    }
   }
 
   public SolrCore getCore() {
@@ -176,9 +191,9 @@ public class SolrCoreMetricManager implements Closeable {
 
   public static String createRegistryName(boolean cloud, String collectionName, String shardName, String replicaName, String coreName) {
     if (cloud) { // build registry name from logical names
-      return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, collectionName, shardName, replicaName);
+      return SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, collectionName, shardName, replicaName);
     } else {
-      return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, coreName);
+      return SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, coreName);
     }
   }
 
@@ -224,7 +239,7 @@ public class SolrCoreMetricManager implements Closeable {
 
   public static String createLeaderRegistryName(boolean cloud, String collectionName, String shardName) {
     if (cloud) {
-      return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.collection, collectionName, shardName, "leader");
+      return SolrMetricManager.getRegistryName(SolrInfoBean.Group.collection, collectionName, shardName, "leader");
     } else {
       return null;
     }
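
For concreteness, a sketch of the registry names these helpers produce (assuming the usual
"solr." prefix from SolrMetricManager.REGISTRY_NAME_PREFIX; the collection, shard, and
replica names are made up):

    // cloud mode builds the name from logical names; the core name is unused:
    createRegistryName(true, "gettingstarted", "shard1", "replica_n1", "ignored");
    //   -> "solr.core.gettingstarted.shard1.replica_n1"
    // standalone mode uses the core name:
    createRegistryName(false, null, null, null, "collection1");
    //   -> "solr.core.collection1"
    createLeaderRegistryName(true, "gettingstarted", "shard1");
    //   -> "solr.collection.gettingstarted.shard1.leader"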

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/SolrMetricInfo.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricInfo.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricInfo.java
index 4d093eb..8edfa04 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricInfo.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricInfo.java
@@ -17,7 +17,7 @@
 package org.apache.solr.metrics;
 
 import com.codahale.metrics.MetricRegistry;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 
 /**
  * Wraps meta-data for a metric.
@@ -25,7 +25,7 @@ import org.apache.solr.core.SolrInfoMBean;
 public final class SolrMetricInfo {
   public final String name;
   public final String scope;
-  public final SolrInfoMBean.Category category;
+  public final SolrInfoBean.Category category;
 
   /**
    * Creates a new instance of {@link SolrMetricInfo}.
@@ -34,7 +34,7 @@ public final class SolrMetricInfo {
    * @param scope    the scope of the metric (e.g. `/admin/ping`)
    * @param name     the name of the metric (e.g. `Requests`)
    */
-  public SolrMetricInfo(SolrInfoMBean.Category category, String scope, String name) {
+  public SolrMetricInfo(SolrInfoBean.Category category, String scope, String name) {
     this.name = name;
     this.scope = scope;
     this.category = category;
@@ -45,18 +45,25 @@ public final class SolrMetricInfo {
       return null;
     }
     String[] names = fullName.split("\\.");
-    if (names.length < 3) { // not a valid info
+    if (names.length < 2) { // not a valid info
       return null;
     }
     // check top-level name for valid category
-    SolrInfoMBean.Category category;
+    SolrInfoBean.Category category;
     try {
-      category = SolrInfoMBean.Category.valueOf(names[0]);
+      category = SolrInfoBean.Category.valueOf(names[0]);
     } catch (IllegalArgumentException e) { // not a valid category
       return null;
     }
-    String scope = names[1];
-    String name = fullName.substring(names[0].length() + names[1].length() + 2);
+    String scope;
+    String name;
+    if (names.length == 2) {
+      scope = null;
+      name = fullName.substring(names[0].length() + 1);
+    } else {
+      scope = names[1];
+      name = fullName.substring(names[0].length() + names[1].length() + 2);
+    }
     return new SolrMetricInfo(category, scope, name);
   }
 

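The relaxed length check above makes two-segment metric names valid, yielding a null scope. A small sketch of the expected parsing; the metric names are invented for illustration.

    import org.apache.solr.metrics.SolrMetricInfo;

    public class MetricInfoSketch {
      public static void main(String[] args) {
        // three segments: category=QUERY, scope="/select", name="requests"
        SolrMetricInfo withScope = SolrMetricInfo.of("QUERY./select.requests");
        // two segments, newly valid: category=CACHE, scope=null, name="fieldCache"
        SolrMetricInfo noScope = SolrMetricInfo.of("CACHE.fieldCache");
        // an unknown category (or a single segment) still returns null
        SolrMetricInfo invalid = SolrMetricInfo.of("bogus.name");
        System.out.println(withScope + " / " + noScope + " / " + invalid);
      }
    }
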
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
index f4abee0..d4eb06a 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
@@ -51,7 +51,7 @@ import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.metrics.reporters.solr.SolrClusterReporter;
 import org.apache.solr.metrics.reporters.solr.SolrShardReporter;
@@ -69,11 +69,11 @@ import org.slf4j.LoggerFactory;
  * {@link MetricRegistry} instances are automatically created when first referenced by name. Similarly,
  * instances of {@link Metric} implementations, such as {@link Meter}, {@link Counter}, {@link Timer} and
  * {@link Histogram} are automatically created and registered under hierarchical names, in a specified
- * registry, when {@link #meter(String, String, String...)} and other similar methods are called.
+ * registry, when {@link #meter(SolrInfoBean, String, String, String...)} and other similar methods are called.
  * <p>This class enforces a common prefix ({@link #REGISTRY_NAME_PREFIX}) in all registry
  * names.</p>
  * <p>Solr uses several different registries for collecting metrics belonging to different groups, using
- * {@link org.apache.solr.core.SolrInfoMBean.Group} as the main name of the registry (plus the
+ * {@link org.apache.solr.core.SolrInfoBean.Group} as the main name of the registry (plus the
  * above-mentioned prefix). Instances of {@link SolrMetricManager} are created for each {@link org.apache.solr.core.CoreContainer},
  * and most registries are local to each instance, with the exception of two global registries:
  * <code>solr.jetty</code> and <code>solr.jvm</code>, which are shared between all {@link org.apache.solr.core.CoreContainer}-s</p>
@@ -87,11 +87,11 @@ public class SolrMetricManager {
 
   /** Registry name for Jetty-specific metrics. This name is also subject to overrides controlled by
    * system properties. This registry is shared between instances of {@link SolrMetricManager}. */
-  public static final String JETTY_REGISTRY = REGISTRY_NAME_PREFIX + SolrInfoMBean.Group.jetty.toString();
+  public static final String JETTY_REGISTRY = REGISTRY_NAME_PREFIX + SolrInfoBean.Group.jetty.toString();
 
   /** Registry name for JVM-specific metrics. This name is also subject to overrides controlled by
    * system properties. This registry is shared between instances of {@link SolrMetricManager}. */
-  public static final String JVM_REGISTRY = REGISTRY_NAME_PREFIX + SolrInfoMBean.Group.jvm.toString();
+  public static final String JVM_REGISTRY = REGISTRY_NAME_PREFIX + SolrInfoBean.Group.jvm.toString();
 
   private final ConcurrentMap<String, MetricRegistry> registries = new ConcurrentHashMap<>();
 
@@ -247,6 +247,66 @@ public class SolrMetricManager {
     }
   }
 
+  public static class OrFilter implements MetricFilter {
+    List<MetricFilter> filters = new ArrayList<>();
+
+    public OrFilter(Collection<MetricFilter> filters) {
+      if (filters != null) {
+        this.filters.addAll(filters);
+      }
+    }
+
+    public OrFilter(MetricFilter... filters) {
+      if (filters != null) {
+        for (MetricFilter filter : filters) {
+          if (filter != null) {
+            this.filters.add(filter);
+          }
+        }
+      }
+    }
+
+    @Override
+    public boolean matches(String s, Metric metric) {
+      for (MetricFilter filter : filters) {
+        if (filter.matches(s, metric)) {
+          return true;
+        }
+      }
+      return false;
+    }
+  }
+
+  public static class AndFilter implements MetricFilter {
+    List<MetricFilter> filters = new ArrayList<>();
+
+    public AndFilter(Collection<MetricFilter> filters) {
+      if (filters != null) {
+        this.filters.addAll(filters);
+      }
+    }
+
+    public AndFilter(MetricFilter... filters) {
+      if (filters != null) {
+        for (MetricFilter filter : filters) {
+          if (filter != null) {
+            this.filters.add(filter);
+          }
+        }
+      }
+    }
+
+    @Override
+    public boolean matches(String s, Metric metric) {
+      for (MetricFilter filter : filters) {
+        if (!filter.matches(s, metric)) {
+          return false;
+        }
+      }
+      return true;
+    }
+  }
+
   /**
    * Return a set of existing registry names.
    */
@@ -452,6 +512,21 @@ public class SolrMetricManager {
   }
 
   /**
+   * Retrieve matching metrics and their names.
+   * @param registry registry name.
+   * @param metricFilter filter (null is equivalent to {@link MetricFilter#ALL}).
+   * @return map of matching names and metrics
+   */
+  public Map<String, Metric> getMetrics(String registry, MetricFilter metricFilter) {
+    if (metricFilter == null || metricFilter == MetricFilter.ALL) {
+      return registry(registry).getMetrics();
+    }
+    return registry(registry).getMetrics().entrySet().stream()
+        .filter(entry -> metricFilter.matches(entry.getKey(), entry.getValue()))
+        .collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue()));
+  }
+
+  /**
    * Create or get an existing named {@link Meter}
    * @param registry registry name
    * @param metricName metric name, either final name or a fully-qualified name
@@ -459,8 +534,12 @@ public class SolrMetricManager {
    * @param metricPath (optional) additional top-most metric name path elements
    * @return existing or a newly created {@link Meter}
    */
-  public Meter meter(String registry, String metricName, String... metricPath) {
-    return registry(registry).meter(mkName(metricName, metricPath));
+  public Meter meter(SolrInfoBean info, String registry, String metricName, String... metricPath) {
+    final String name = mkName(metricName, metricPath);
+    if (info != null) {
+      info.registerMetricName(name);
+    }
+    return registry(registry).meter(name);
   }
 
   /**
@@ -471,8 +550,12 @@ public class SolrMetricManager {
    * @param metricPath (optional) additional top-most metric name path elements
    * @return existing or a newly created {@link Timer}
    */
-  public Timer timer(String registry, String metricName, String... metricPath) {
-    return registry(registry).timer(mkName(metricName, metricPath));
+  public Timer timer(SolrInfoBean info, String registry, String metricName, String... metricPath) {
+    final String name = mkName(metricName, metricPath);
+    if (info != null) {
+      info.registerMetricName(name);
+    }
+    return registry(registry).timer(name);
   }
 
   /**
@@ -483,8 +566,12 @@ public class SolrMetricManager {
    * @param metricPath (optional) additional top-most metric name path elements
    * @return existing or a newly created {@link Counter}
    */
-  public Counter counter(String registry, String metricName, String... metricPath) {
-    return registry(registry).counter(mkName(metricName, metricPath));
+  public Counter counter(SolrInfoBean info, String registry, String metricName, String... metricPath) {
+    final String name = mkName(metricName, metricPath);
+    if (info != null) {
+      info.registerMetricName(name);
+    }
+    return registry(registry).counter(name);
   }
 
   /**
@@ -495,8 +582,12 @@ public class SolrMetricManager {
    * @param metricPath (optional) additional top-most metric name path elements
    * @return existing or a newly created {@link Histogram}
    */
-  public Histogram histogram(String registry, String metricName, String... metricPath) {
-    return registry(registry).histogram(mkName(metricName, metricPath));
+  public Histogram histogram(SolrInfoBean info, String registry, String metricName, String... metricPath) {
+    final String name = mkName(metricName, metricPath);
+    if (info != null) {
+      info.registerMetricName(name);
+    }
+    return registry(registry).histogram(name);
   }
 
   /**
@@ -510,9 +601,12 @@ public class SolrMetricManager {
    *                   using dotted notation
    * @param metricPath (optional) additional top-most metric name path elements
    */
-  public void register(String registry, Metric metric, boolean force, String metricName, String... metricPath) {
+  public void register(SolrInfoBean info, String registry, Metric metric, boolean force, String metricName, String... metricPath) {
     MetricRegistry metricRegistry = registry(registry);
     String fullName = mkName(metricName, metricPath);
+    if (info != null) {
+      info.registerMetricName(fullName);
+    }
     synchronized (metricRegistry) {
       if (force && metricRegistry.getMetrics().containsKey(fullName)) {
         metricRegistry.remove(fullName);
@@ -521,8 +615,8 @@ public class SolrMetricManager {
     }
   }
 
-  public void registerGauge(String registry, Gauge<?> gauge, boolean force, String metricName, String... metricPath) {
-    register(registry, gauge, force, metricName, metricPath);
+  public void registerGauge(SolrInfoBean info, String registry, Gauge<?> gauge, boolean force, String metricName, String... metricPath) {
+    register(info, registry, gauge, force, metricName, metricPath);
   }
 
   /**
@@ -569,7 +663,7 @@ public class SolrMetricManager {
    * </pre>
    * <b>NOTE:</b> Once a registry is renamed in a way that its metrics are combined with another repository
    * it is no longer possible to retrieve the original metrics until this renaming is removed and the Solr
-   * {@link org.apache.solr.core.SolrInfoMBean.Group} of components that reported to that name is restarted.
+   * {@link org.apache.solr.core.SolrInfoBean.Group} of components that reported to that name is restarted.
    * @param registry The name of the registry
    * @return A potentially overridden (via System properties) registry name
    */
@@ -600,7 +694,7 @@ public class SolrMetricManager {
    *              and the group parameter will be ignored.
    * @return fully-qualified and prefixed registry name, with overrides applied.
    */
-  public static String getRegistryName(SolrInfoMBean.Group group, String... names) {
+  public static String getRegistryName(SolrInfoBean.Group group, String... names) {
     String fullName;
     String prefix = REGISTRY_NAME_PREFIX + group.toString() + ".";
     // check for existing prefix and group
@@ -622,7 +716,7 @@ public class SolrMetricManager {
   // reporter management
 
   /**
-   * Create and register {@link SolrMetricReporter}-s specific to a {@link org.apache.solr.core.SolrInfoMBean.Group}.
+   * Create and register {@link SolrMetricReporter}-s specific to a {@link org.apache.solr.core.SolrInfoBean.Group}.
    * Note: reporters that specify neither "group" nor "registry" attributes are treated as universal -
    * they will always be loaded for any group. These two attributes may also contain multiple comma- or
    * whitespace-separated values, in which case the reporter will be loaded for any matching value from
@@ -634,7 +728,7 @@ public class SolrMetricManager {
    * @param group selected group, not null
    * @param registryNames optional child registry name elements
    */
-  public void loadReporters(PluginInfo[] pluginInfos, SolrResourceLoader loader, String tag, SolrInfoMBean.Group group, String... registryNames) {
+  public void loadReporters(PluginInfo[] pluginInfos, SolrResourceLoader loader, String tag, SolrInfoBean.Group group, String... registryNames) {
     if (pluginInfos == null || pluginInfos.length == 0) {
       return;
     }
@@ -941,13 +1035,13 @@ public class SolrMetricManager {
     // prepare default plugin if none present in the config
     Map<String, String> attrs = new HashMap<>();
     attrs.put("name", "shardDefault");
-    attrs.put("group", SolrInfoMBean.Group.shard.toString());
+    attrs.put("group", SolrInfoBean.Group.shard.toString());
     Map<String, Object> initArgs = new HashMap<>();
     initArgs.put("period", DEFAULT_CLOUD_REPORTER_PERIOD);
 
     String registryName = core.getCoreMetricManager().getRegistryName();
     // collect infos and normalize
-    List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoMBean.Group.shard.toString(), SolrShardReporter.class.getName(),
+    List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoBean.Group.shard.toString(), SolrShardReporter.class.getName(),
         attrs, initArgs, null);
     for (PluginInfo info : infos) {
       try {
@@ -967,12 +1061,12 @@ public class SolrMetricManager {
     }
     Map<String, String> attrs = new HashMap<>();
     attrs.put("name", "clusterDefault");
-    attrs.put("group", SolrInfoMBean.Group.cluster.toString());
+    attrs.put("group", SolrInfoBean.Group.cluster.toString());
     Map<String, Object> initArgs = new HashMap<>();
     initArgs.put("period", DEFAULT_CLOUD_REPORTER_PERIOD);
-    List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoMBean.Group.cluster.toString(), SolrClusterReporter.class.getName(),
+    List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoBean.Group.cluster.toString(), SolrClusterReporter.class.getName(),
         attrs, initArgs, null);
-    String registryName = getRegistryName(SolrInfoMBean.Group.cluster);
+    String registryName = getRegistryName(SolrInfoBean.Group.cluster);
     for (PluginInfo info : infos) {
       try {
         SolrMetricReporter reporter = loadReporter(registryName, cc.getResourceLoader(), info, null);

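The new OrFilter/AndFilter compose with the existing PrefixFilter, and getMetrics(String, MetricFilter) returns the matching subset. A usage sketch; the registry name and prefixes are assumptions for illustration.

    import java.util.Map;
    import com.codahale.metrics.Metric;
    import com.codahale.metrics.MetricFilter;
    import org.apache.solr.metrics.SolrMetricManager;

    public class FilterSketch {
      public static void dumpCacheAndQueryMetrics(SolrMetricManager manager) {
        MetricFilter caches = new SolrMetricManager.PrefixFilter("CACHE.");
        MetricFilter queries = new SolrMetricManager.PrefixFilter("QUERY.");
        // matches metrics whose names start with either prefix
        MetricFilter either = new SolrMetricManager.OrFilter(caches, queries);
        Map<String, Metric> matched = manager.getMetrics("solr.core.techproducts", either);
        matched.keySet().forEach(System.out::println);
      }
    }
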
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/SolrMetricReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricReporter.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricReporter.java
index ff2d3fc..9ad15d0 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricReporter.java
@@ -30,6 +30,7 @@ public abstract class SolrMetricReporter implements Closeable, PluginInfoInitial
   protected final String registryName;
   protected final SolrMetricManager metricManager;
   protected PluginInfo pluginInfo;
+  protected boolean enabled = true;
 
   /**
    * Create a reporter for metrics managed in a named registry.
@@ -58,6 +59,17 @@ public abstract class SolrMetricReporter implements Closeable, PluginInfoInitial
   }
 
   /**
+   * Enable or disable reporting; enabled by default. Implementations should check
+   * this flag in {@link #validate()} and enable or disable reporting accordingly.
+   * @param enabled whether reporting is enabled; defaults to true when null or not set.
+   */
+  public void setEnabled(Boolean enabled) {
+    if (enabled != null) {
+      this.enabled = enabled;
+    }
+  }
+
+  /**
    * Get the effective {@link PluginInfo} instance that was used for
    * initialization of this plugin.
    * @return plugin info, or null if not yet initialized.

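Concrete reporters are expected to honour the new "enabled" flag themselves. A hedged sketch of the pattern; the class is hypothetical, and the constructor argument order is assumed from the subclasses below.

    import org.apache.solr.metrics.SolrMetricManager;
    import org.apache.solr.metrics.SolrMetricReporter;

    public class NoopWhenDisabledReporter extends SolrMetricReporter {

      public NoopWhenDisabledReporter(SolrMetricManager metricManager, String registryName) {
        super(metricManager, registryName);
      }

      @Override
      protected void validate() throws IllegalStateException {
        if (!enabled) {
          return; // disabled via an "enabled=false" init arg; start nothing
        }
        // ... validate the remaining init args and start the underlying reporter ...
      }

      @Override
      public void close() {
        // stop the underlying reporter if it was started
      }
    }
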
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java b/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java
index 1f5b4f0..4298c18 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java
@@ -51,6 +51,20 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
   }
 
   /**
+   * Return current domain.
+   */
+  public String getDomain() {
+    return domain;
+  }
+
+  /**
+   * Return current reporterName.
+   */
+  public String getReporterName() {
+    return reporterName;
+  }
+
+  /**
    * Create a hierarchical name.
    *
    * @param type    metric class, eg. "counters", may be null for non-metric MBeans
@@ -60,7 +74,8 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
   @Override
   public ObjectName createName(String type, String currentDomain, String name) {
     SolrMetricInfo metricInfo = SolrMetricInfo.of(name);
-
+    String safeName = metricInfo != null ? metricInfo.name : name;
+    safeName = safeName.replaceAll(":", "_");
     // It turns out that ObjectName(String) mostly preserves key ordering
     // as specified in the constructor (except for the 'type' key that ends
     // up at top level) - unlike ObjectName(String, Map) constructor
@@ -90,24 +105,42 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
       sb.append(currentDomain);
       sb.append(':');
     }
-    sb.append("reporter=");
-    sb.append(reporterName);
-    sb.append(',');
+    if (props != null && props.length > 0) {
+      boolean added = false;
+      for (int i = 0; i < props.length; i += 2) {
+        if (props[i] == null || props[i].isEmpty()) {
+          continue;
+        }
+        if (props[i + 1] == null || props[i + 1].isEmpty()) {
+          continue;
+        }
+        sb.append(',');
+        sb.append(props[i]);
+        sb.append('=');
+        sb.append(props[i + 1]);
+        added = true;
+      }
+      if (added) {
+        sb.append(',');
+      }
+    }
     if (metricInfo != null) {
       sb.append("category=");
       sb.append(metricInfo.category.toString());
-      sb.append(",scope=");
-      sb.append(metricInfo.scope);
+      if (metricInfo.scope != null) {
+        sb.append(",scope=");
+        sb.append(metricInfo.scope);
+      }
       // we could also split by type, but don't call it 'type' :)
       // if (type != null) {
       //   sb.append(",class=");
       //   sb.append(type);
       // }
       sb.append(",name=");
-      sb.append(metricInfo.name);
+      sb.append(safeName);
     } else {
       // make dotted names into hierarchies
-      String[] path = name.split("\\.");
+      String[] path = safeName.split("\\.");
       for (int i = 0; i < path.length - 1; i++) {
         if (i > 0) {
           sb.append(',');
@@ -127,20 +160,6 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
       sb.append("name=");
       sb.append(path[path.length - 1]);
     }
-    if (props != null && props.length > 0) {
-      for (int i = 0; i < props.length; i += 2) {
-        if (props[i] == null || props[i].isEmpty()) {
-          continue;
-        }
-        if (props[i + 1] == null || props[i + 1].isEmpty()) {
-          continue;
-        }
-        sb.append(',');
-        sb.append(props[i]);
-        sb.append('=');
-        sb.append(props[i + 1]);
-      }
-    }
 
     ObjectName objectName;
 

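With the reordering above, key=value properties now precede the category, and a two-segment metric name omits the scope key entirely. A sketch of the expected ObjectName layout; the rendered strings are assumptions pieced together from the code.

    import javax.management.ObjectName;
    import org.apache.solr.metrics.reporters.JmxObjectNameFactory;

    public class ObjectNameSketch {
      public static void main(String[] args) {
        JmxObjectNameFactory factory = new JmxObjectNameFactory("test", "solr.core.techproducts");
        // expected: solr.core.techproducts:category=QUERY,scope=/select,name=requests
        ObjectName withScope = factory.createName("gauges", factory.getDomain(), "QUERY./select.requests");
        // expected: solr.core.techproducts:category=CACHE,name=fieldCache   (no scope key)
        ObjectName noScope = factory.createName("gauges", factory.getDomain(), "CACHE.fieldCache");
        System.out.println(withScope + "\n" + noScope);
      }
    }
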
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/reporters/ReporterClientCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/ReporterClientCache.java b/solr/core/src/java/org/apache/solr/metrics/reporters/ReporterClientCache.java
new file mode 100644
index 0000000..5745dec
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/ReporterClientCache.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics.reporters;
+
+import java.io.Closeable;
+import java.lang.invoke.MethodHandles;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Simple cache for reusable service clients used by some implementations of
+ * {@link org.apache.solr.metrics.SolrMetricReporter}.
+ */
+public class ReporterClientCache<T> implements Closeable {
+  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private final Map<String, T> cache = new ConcurrentHashMap<>();
+
+  /**
+   * Provides an instance of a service client.
+   * @param &lt;T&gt; the client type
+   */
+  public interface ClientProvider<T> {
+    /**
+     * Get an instance of a service client. Implementations are not required to
+     * return a new client instance on every invocation.
+     * @return client instance
+     * @throws Exception if client creation fails.
+     */
+    T get() throws Exception;
+  }
+
+  /**
+   * Get existing or register a new client.
+   * @param id client id
+   * @param clientProvider provider of new client instances
+   */
+  public synchronized T getOrCreate(String id, ClientProvider<T> clientProvider) {
+    T item = cache.get(id);
+    if (item == null) {
+      try {
+        item = clientProvider.get();
+        cache.put(id, item);
+      } catch (Exception e) {
+        LOG.warn("Error providing a new client for id=" + id, e);
+        item = null;
+      }
+    }
+    return item;
+  }
+
+  /**
+   * Empty this cache, and close all clients that are {@link Closeable}.
+   */
+  public void close() {
+    for (T client : cache.values()) {
+      if (client instanceof Closeable) {
+        try {
+          ((Closeable)client).close();
+        } catch (Exception e) {
+          LOG.warn("Error closing client " + client + ", ignoring...", e);
+        }
+      }
+    }
+    cache.clear();
+  }
+}

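A usage sketch for the cache; the id scheme mirrors the host:port:multicast key that SolrGangliaReporter adopts below, and the endpoint values are made up.

    import info.ganglia.gmetric4j.gmetric.GMetric;
    import org.apache.solr.metrics.reporters.ReporterClientCache;

    public class ClientCacheSketch {
      // static, so reporters pointing at the same endpoint share one client
      private static final ReporterClientCache<GMetric> CLIENTS = new ReporterClientCache<>();

      public static GMetric gangliaFor(String host, int port, boolean multicast) {
        String id = host + ":" + port + ":" + multicast;
        return CLIENTS.getOrCreate(id, () -> new GMetric(host, port,
            multicast ? GMetric.UDPAddressingMode.MULTICAST : GMetric.UDPAddressingMode.UNICAST,
            1));
      }
    }
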
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/reporters/SolrGangliaReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrGangliaReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrGangliaReporter.java
index 45561e5..142ddd8 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrGangliaReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrGangliaReporter.java
@@ -17,6 +17,9 @@
 package org.apache.solr.metrics.reporters;
 
 import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.concurrent.TimeUnit;
 
 import com.codahale.metrics.MetricFilter;
@@ -24,21 +27,26 @@ import com.codahale.metrics.ganglia.GangliaReporter;
 import info.ganglia.gmetric4j.gmetric.GMetric;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricReporter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  *
  */
 public class SolrGangliaReporter extends SolrMetricReporter {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private String host = null;
   private int port = -1;
   private boolean multicast;
   private int period = 60;
   private String instancePrefix = null;
-  private String filterPrefix = null;
+  private List<String> filters = new ArrayList<>();
   private boolean testing;
   private GangliaReporter reporter;
 
+  private static final ReporterClientCache<GMetric> serviceRegistry = new ReporterClientCache<>();
+
   // for unit tests
   GMetric ganglia = null;
 
@@ -65,10 +73,24 @@ public class SolrGangliaReporter extends SolrMetricReporter {
     this.instancePrefix = prefix;
   }
 
-  public void setFilter(String filter) {
-    this.filterPrefix = filter;
+  /**
+   * Report only metrics with names matching any of the prefix filters.
+   * @param filters list of 0 or more prefixes. If the list is empty then
+   *                all names will match.
+   */
+  public void setFilter(List<String> filters) {
+    if (filters == null || filters.isEmpty()) {
+      return;
+    }
+    this.filters.addAll(filters);
   }
 
+  // SolrPluginUtils.invokeSetters may pass a single String value, so this overload is needed too
+  public void setFilter(String filter) {
+    if (filter != null && !filter.isEmpty()) {
+      this.filters.add(filter);
+    }
+  }
 
   public void setPeriod(int period) {
     this.period = period;
@@ -89,6 +111,10 @@ public class SolrGangliaReporter extends SolrMetricReporter {
 
   @Override
   protected void validate() throws IllegalStateException {
+    if (!enabled) {
+      log.info("Reporter disabled for registry " + registryName);
+      return;
+    }
     if (host == null) {
       throw new IllegalStateException("Init argument 'host' must be set to a valid Ganglia server name.");
     }
@@ -106,12 +132,12 @@ public class SolrGangliaReporter extends SolrMetricReporter {
   //this is a separate method for unit tests
   void start() {
     if (!testing) {
-      try {
-        ganglia = new GMetric(host, port,
-            multicast ? GMetric.UDPAddressingMode.MULTICAST : GMetric.UDPAddressingMode.UNICAST,
-            1);
-      } catch (IOException ioe) {
-        throw new IllegalStateException("Exception connecting to Ganglia", ioe);
+      String id = host + ":" + port + ":" + multicast;
+      ganglia = serviceRegistry.getOrCreate(id, () -> new GMetric(host, port,
+          multicast ? GMetric.UDPAddressingMode.MULTICAST : GMetric.UDPAddressingMode.UNICAST,
+          1));
+      if (ganglia == null) {
+        return;
       }
     }
     if (instancePrefix == null) {
@@ -125,8 +151,8 @@ public class SolrGangliaReporter extends SolrMetricReporter {
         .convertDurationsTo(TimeUnit.MILLISECONDS)
         .prefixedWith(instancePrefix);
     MetricFilter filter;
-    if (filterPrefix != null) {
-      filter = new SolrMetricManager.PrefixFilter(filterPrefix);
+    if (!filters.isEmpty()) {
+      filter = new SolrMetricManager.PrefixFilter(filters);
     } else {
       filter = MetricFilter.ALL;
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/reporters/SolrGraphiteReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrGraphiteReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrGraphiteReporter.java
index 8565ce8..d5b7a20 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrGraphiteReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrGraphiteReporter.java
@@ -18,6 +18,8 @@ package org.apache.solr.metrics.reporters;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.concurrent.TimeUnit;
 
 import com.codahale.metrics.MetricFilter;
@@ -41,9 +43,11 @@ public class SolrGraphiteReporter extends SolrMetricReporter {
   private int period = 60;
   private boolean pickled = false;
   private String instancePrefix = null;
-  private String filterPrefix = null;
+  private List<String> filters = new ArrayList<>();
   private GraphiteReporter reporter = null;
 
+  private static final ReporterClientCache<GraphiteSender> serviceRegistry = new ReporterClientCache<>();
+
   /**
    * Create a Graphite reporter for metrics managed in a named registry.
    *
@@ -67,10 +71,25 @@ public class SolrGraphiteReporter extends SolrMetricReporter {
     this.instancePrefix = prefix;
   }
 
+  /**
+   * Report only metrics with names matching any of the prefix filters.
+   * @param filters list of 0 or more prefixes. If the list is empty then
+   *                all names will match.
+   */
+  public void setFilter(List<String> filters) {
+    if (filters == null || filters.isEmpty()) {
+      return;
+    }
+    this.filters.addAll(filters);
+  }
+
   public void setFilter(String filter) {
-    this.filterPrefix = filter;
+    if (filter != null && !filter.isEmpty()) {
+      this.filters.add(filter);
+    }
   }
 
+
   public void setPickled(boolean pickled) {
     this.pickled = pickled;
   }
@@ -81,6 +100,10 @@ public class SolrGraphiteReporter extends SolrMetricReporter {
 
   @Override
   protected void validate() throws IllegalStateException {
+    if (!enabled) {
+      log.info("Reporter disabled for registry " + registryName);
+      return;
+    }
     if (host == null) {
       throw new IllegalStateException("Init argument 'host' must be set to a valid Graphite server name.");
     }
@@ -93,12 +116,15 @@ public class SolrGraphiteReporter extends SolrMetricReporter {
     if (period < 1) {
       throw new IllegalStateException("Init argument 'period' is in time unit 'seconds' and must be at least 1.");
     }
-    final GraphiteSender graphite;
-    if (pickled) {
-      graphite = new PickledGraphite(host, port);
-    } else {
-      graphite = new Graphite(host, port);
-    }
+    GraphiteSender graphite;
+    String id = host + ":" + port + ":" + pickled;
+    graphite = serviceRegistry.getOrCreate(id, () -> {
+      if (pickled) {
+        return new PickledGraphite(host, port);
+      } else {
+        return new Graphite(host, port);
+      }
+    });
     if (instancePrefix == null) {
       instancePrefix = registryName;
     } else {
@@ -110,8 +136,8 @@ public class SolrGraphiteReporter extends SolrMetricReporter {
         .convertRatesTo(TimeUnit.SECONDS)
         .convertDurationsTo(TimeUnit.MILLISECONDS);
     MetricFilter filter;
-    if (filterPrefix != null) {
-      filter = new SolrMetricManager.PrefixFilter(filterPrefix);
+    if (!filters.isEmpty()) {
+      filter = new SolrMetricManager.PrefixFilter(filters);
     } else {
       filter = MetricFilter.ALL;
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/reporters/SolrJmxReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrJmxReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrJmxReporter.java
index 0e78eee..d09e043 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrJmxReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrJmxReporter.java
@@ -16,15 +16,25 @@
  */
 package org.apache.solr.metrics.reporters;
 
+import javax.management.InstanceNotFoundException;
 import javax.management.MBeanServer;
+import javax.management.ObjectInstance;
+import javax.management.ObjectName;
 
-import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.lang.management.ManagementFactory;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
 import java.util.Locale;
+import java.util.Set;
 
+import com.codahale.metrics.Gauge;
 import com.codahale.metrics.JmxReporter;
+import com.codahale.metrics.MetricFilter;
+import com.codahale.metrics.MetricRegistry;
+import com.codahale.metrics.MetricRegistryListener;
 import org.apache.solr.core.PluginInfo;
+import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricReporter;
 import org.apache.solr.util.JmxUtil;
@@ -34,17 +44,25 @@ import org.slf4j.LoggerFactory;
 /**
  * A {@link SolrMetricReporter} that finds (or creates) a MBeanServer from
  * the given configuration and registers metrics to it with JMX.
+ * <p>NOTE: the {@link JmxReporter} that this class uses exports only newly added metrics
+ * (it does not process metrics already present in a registry).</p>
  */
 public class SolrJmxReporter extends SolrMetricReporter {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
+  private static final ReporterClientCache<MBeanServer> serviceRegistry = new ReporterClientCache<>();
+
   private String domain;
   private String agentId;
   private String serviceUrl;
+  private String rootName;
+  private List<String> filters = new ArrayList<>();
 
   private JmxReporter reporter;
+  private MetricRegistry registry;
   private MBeanServer mBeanServer;
+  private MetricsMapListener listener;
 
   /**
    * Creates a new instance of {@link SolrJmxReporter}.
@@ -57,7 +75,7 @@ public class SolrJmxReporter extends SolrMetricReporter {
   }
 
   /**
-   * Initializes the reporter by finding (or creating) a MBeanServer
+   * Initializes the reporter by finding an MBeanServer
    * and registering the metricManager's metric registry.
    *
    * @param pluginInfo the configuration for the reporter
@@ -65,44 +83,62 @@ public class SolrJmxReporter extends SolrMetricReporter {
   @Override
   public synchronized void init(PluginInfo pluginInfo) {
     super.init(pluginInfo);
-
+    if (!enabled) {
+      log.info("Reporter disabled for registry " + registryName);
+      return;
+    }
+    log.debug("Initializing for registry " + registryName);
     if (serviceUrl != null && agentId != null) {
-      ManagementFactory.getPlatformMBeanServer(); // Ensure at least one MBeanServer is available.
       mBeanServer = JmxUtil.findFirstMBeanServer();
-      log.warn("No more than one of serviceUrl(%s) and agentId(%s) should be configured, using first MBeanServer instead of configuration.",
+      log.warn("No more than one of serviceUrl({}) and agentId({}) should be configured, using first MBeanServer instead of configuration.",
           serviceUrl, agentId, mBeanServer);
-    }
-    else if (serviceUrl != null) {
-      try {
-        mBeanServer = JmxUtil.findMBeanServerForServiceUrl(serviceUrl);
-      } catch (IOException e) {
-        log.warn("findMBeanServerForServiceUrl(%s) exception: %s", serviceUrl, e);
-        mBeanServer = null;
-      }
-    }
-    else if (agentId != null) {
+    } else if (serviceUrl != null) {
+      // reuse existing services
+      mBeanServer = serviceRegistry.getOrCreate(serviceUrl, () -> JmxUtil.findMBeanServerForServiceUrl(serviceUrl));
+    } else if (agentId != null) {
       mBeanServer = JmxUtil.findMBeanServerForAgentId(agentId);
     } else {
-      ManagementFactory.getPlatformMBeanServer(); // Ensure at least one MBeanServer is available.
       mBeanServer = JmxUtil.findFirstMBeanServer();
-      log.warn("No serviceUrl or agentId was configured, using first MBeanServer.", mBeanServer);
+      log.debug("No serviceUrl or agentId was configured, using first MBeanServer: " + mBeanServer);
     }
 
     if (mBeanServer == null) {
-      log.warn("No JMX server found. Not exposing Solr metrics.");
+      log.warn("No JMX server found. Not exposing Solr metrics via JMX.");
       return;
     }
 
-    JmxObjectNameFactory jmxObjectNameFactory = new JmxObjectNameFactory(pluginInfo.name, domain);
+    if (domain == null || domain.isEmpty()) {
+      domain = registryName;
+    }
+    String fullDomain = domain;
+    if (rootName != null && !rootName.isEmpty()) {
+      fullDomain = rootName + "." + domain;
+    }
+    JmxObjectNameFactory jmxObjectNameFactory = new JmxObjectNameFactory(pluginInfo.name, fullDomain);
+    registry = metricManager.registry(registryName);
+    // filter out MetricsMap gauges - we have a better way of handling them
+    MetricFilter mmFilter = (name, metric) -> !(metric instanceof MetricsMap);
+    MetricFilter filter;
+    if (filters.isEmpty()) {
+      filter = mmFilter;
+    } else {
+      // apply also prefix filters
+      SolrMetricManager.PrefixFilter prefixFilter = new SolrMetricManager.PrefixFilter(filters);
+      filter = new SolrMetricManager.AndFilter(prefixFilter, mmFilter);
+    }
 
-    reporter = JmxReporter.forRegistry(metricManager.registry(registryName))
+    reporter = JmxReporter.forRegistry(registry)
                           .registerWith(mBeanServer)
-                          .inDomain(domain)
+                          .inDomain(fullDomain)
+                          .filter(filter)
                           .createsObjectNamesWith(jmxObjectNameFactory)
                           .build();
     reporter.start();
+    // workaround for inability to register custom MBeans (to be available in metrics 4.0?)
+    listener = new MetricsMapListener(mBeanServer, jmxObjectNameFactory);
+    registry.addListener(listener);
 
-    log.info("JMX monitoring enabled at server: " + mBeanServer);
+    log.info("JMX monitoring for '" + fullDomain + "' (registry '" + registryName + "') enabled at server: " + mBeanServer);
   }
 
   /**
@@ -114,6 +150,11 @@ public class SolrJmxReporter extends SolrMetricReporter {
       reporter.close();
       reporter = null;
     }
+    if (listener != null && registry != null) {
+      registry.removeListener(listener);
+      listener.close();
+      listener = null;
+    }
   }
 
   /**
@@ -127,9 +168,19 @@ public class SolrJmxReporter extends SolrMetricReporter {
     // Nothing to validate
   }
 
+
+  /**
+   * Set the root name of the JMX hierarchy for this reporter. The default (null or empty)
+   * is none, i.e. the hierarchy starts at the domain name.
+   * @param rootName root name of the JMX name hierarchy, or null or empty for default.
+   */
+  public void setRootName(String rootName) {
+    this.rootName = rootName;
+  }
+
   /**
    * Sets the domain with which MBeans are published. If none is set,
-   * the domain defaults to the name of the core.
+   * the domain defaults to the name of the registry.
    *
    * @param domain the domain
    */
@@ -162,7 +213,46 @@ public class SolrJmxReporter extends SolrMetricReporter {
   }
 
   /**
-   * Retrieves the reporter's MBeanServer.
+   * Return configured agentId or null.
+   */
+  public String getAgentId() {
+    return agentId;
+  }
+
+  /**
+   * Return configured serviceUrl or null.
+   */
+  public String getServiceUrl() {
+    return serviceUrl;
+  }
+
+  /**
+   * Return configured domain or null.
+   */
+  public String getDomain() {
+    return domain;
+  }
+
+  /**
+   * Report only metrics with names matching any of the prefix filters.
+   * @param filters list of 0 or more prefixes. If the list is empty then
+   *                all names will match.
+   */
+  public void setFilter(List<String> filters) {
+    if (filters == null || filters.isEmpty()) {
+      return;
+    }
+    this.filters.addAll(filters);
+  }
+
+  public void setFilter(String filter) {
+    if (filter != null && !filter.isEmpty()) {
+      this.filters.add(filter);
+    }
+  }
+
+  /**
+   * Return the reporter's MBeanServer.
    *
    * @return the reporter's MBeanServer
    */
@@ -170,10 +260,72 @@ public class SolrJmxReporter extends SolrMetricReporter {
     return mBeanServer;
   }
 
+  /**
+   * For unit tests.
+   * @return true if this reporter is actively reporting metrics to JMX.
+   */
+  public boolean isActive() {
+    return reporter != null;
+  }
+
   @Override
   public String toString() {
-    return String.format(Locale.ENGLISH, "[%s@%s: domain = %s, service url = %s, agent id = %s]",
-        getClass().getName(), Integer.toHexString(hashCode()), domain, serviceUrl, agentId);
+    return String.format(Locale.ENGLISH, "[%s@%s: rootName = %s, domain = %s, service url = %s, agent id = %s]",
+        getClass().getName(), Integer.toHexString(hashCode()), rootName, domain, serviceUrl, agentId);
   }
 
+  private static class MetricsMapListener extends MetricRegistryListener.Base {
+    MBeanServer server;
+    JmxObjectNameFactory nameFactory;
+    // keep the names so that we can unregister them on core close
+    Set<ObjectName> registered = new HashSet<>();
+
+    MetricsMapListener(MBeanServer server, JmxObjectNameFactory nameFactory) {
+      this.server = server;
+      this.nameFactory = nameFactory;
+    }
+
+    @Override
+    public void onGaugeAdded(String name, Gauge<?> gauge) {
+      if (!(gauge instanceof MetricsMap)) {
+        return;
+      }
+      synchronized (server) {
+        try {
+          ObjectName objectName = nameFactory.createName("gauges", nameFactory.getDomain(), name);
+          log.debug("REGISTER " + objectName);
+          if (registered.contains(objectName) || server.isRegistered(objectName)) {
+            log.debug("-unregistering old instance of " + objectName);
+            try {
+              server.unregisterMBean(objectName);
+            } catch (InstanceNotFoundException e) {
+              // ignore
+            }
+          }
+          // some MBean servers re-write object name to include additional properties
+          ObjectInstance instance = server.registerMBean(gauge, objectName);
+          if (instance != null) {
+            registered.add(instance.getObjectName());
+          }
+        } catch (Exception e) {
+          log.warn("bean registration error", e);
+        }
+      }
+    }
+
+    public void close() {
+      synchronized (server) {
+        for (ObjectName name : registered) {
+          try {
+            if (server.isRegistered(name)) {
+              server.unregisterMBean(name);
+            }
+          } catch (Exception e) {
+            log.debug("bean unregistration error", e);
+          }
+        }
+        registered.clear();
+      }
+    }
+  }
 }

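The new rootName and prefix filters can be set from init args or programmatically. A hedged sketch of the latter; the registry, root name and prefix are example values, and the constructor argument order is an assumption.

    import org.apache.solr.metrics.SolrMetricManager;
    import org.apache.solr.metrics.reporters.SolrJmxReporter;

    public class JmxReporterSketch {
      public static SolrJmxReporter configure(SolrMetricManager manager) {
        SolrJmxReporter reporter = new SolrJmxReporter(manager, "solr.jvm");
        reporter.setRootName("solr_8983"); // MBeans appear under "solr_8983.solr.jvm:..."
        reporter.setFilter("memory.");     // export only metric names starting with "memory."
        return reporter;                   // init(pluginInfo) locates the MBeanServer and starts it
      }
    }
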
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java
index 817dda1..8b7c35e 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrSlf4jReporter.java
@@ -18,6 +18,8 @@ package org.apache.solr.metrics.reporters;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.concurrent.TimeUnit;
 
 import com.codahale.metrics.MetricFilter;
@@ -47,7 +49,7 @@ public class SolrSlf4jReporter extends SolrMetricReporter {
   private int period = 60;
   private String instancePrefix = null;
   private String logger = null;
-  private String filterPrefix = null;
+  private List<String> filters = new ArrayList<>();
   private Slf4jReporter reporter;
 
   /**
@@ -65,10 +67,25 @@ public class SolrSlf4jReporter extends SolrMetricReporter {
     this.instancePrefix = prefix;
   }
 
+  /**
+   * Report only metrics with names matching any of the prefix filters.
+   * @param filters list of 0 or more prefixes. If the list is empty then
+   *                all names will match.
+   */
+  public void setFilter(List<String> filters) {
+    if (filters == null || filters.isEmpty()) {
+      return;
+    }
+    this.filters.addAll(filters);
+  }
+
   public void setFilter(String filter) {
-    this.filterPrefix = filter;
+    if (filter != null && !filter.isEmpty()) {
+      this.filters.add(filter);
+    }
   }
 
+
   public void setLogger(String logger) {
     this.logger = logger;
   }
@@ -79,6 +96,10 @@ public class SolrSlf4jReporter extends SolrMetricReporter {
 
   @Override
   protected void validate() throws IllegalStateException {
+    if (!enabled) {
+      log.info("Reporter disabled for registry " + registryName);
+      return;
+    }
     if (period < 1) {
       throw new IllegalStateException("Init argument 'period' is in time unit 'seconds' and must be at least 1.");
     }
@@ -93,8 +114,8 @@ public class SolrSlf4jReporter extends SolrMetricReporter {
         .convertDurationsTo(TimeUnit.MILLISECONDS);
 
     MetricFilter filter;
-    if (filterPrefix != null) {
-      filter = new SolrMetricManager.PrefixFilter(filterPrefix);
+    if (!filters.isEmpty()) {
+      filter = new SolrMetricManager.PrefixFilter(filters);
     } else {
       filter = MetricFilter.ALL;
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
index a34accd..c437457 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
@@ -33,7 +33,7 @@ import org.apache.solr.cloud.ZkController;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.core.CoreContainer;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.handler.admin.MetricsCollectorHandler;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricReporter;
@@ -92,14 +92,14 @@ import static org.apache.solr.common.params.CommonParams.ID;
 public class SolrClusterReporter extends SolrMetricReporter {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  public static final String CLUSTER_GROUP = SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.cluster.toString());
+  public static final String CLUSTER_GROUP = SolrMetricManager.overridableRegistryName(SolrInfoBean.Group.cluster.toString());
 
   public static final List<SolrReporter.Report> DEFAULT_REPORTS = new ArrayList<SolrReporter.Report>() {{
     add(new SolrReporter.Report(CLUSTER_GROUP, "jetty",
-        SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.jetty.toString()),
+        SolrMetricManager.overridableRegistryName(SolrInfoBean.Group.jetty.toString()),
         Collections.emptySet())); // all metrics
     add(new SolrReporter.Report(CLUSTER_GROUP, "jvm",
-        SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.jvm.toString()),
+        SolrMetricManager.overridableRegistryName(SolrInfoBean.Group.jvm.toString()),
         new HashSet<String>() {{
           add("memory\\.total\\..*");
           add("memory\\.heap\\..*");
@@ -109,7 +109,7 @@ public class SolrClusterReporter extends SolrMetricReporter {
           add("os\\.OpenFileDescriptorCount");
           add("threads\\.count");
         }}));
-    add(new SolrReporter.Report(CLUSTER_GROUP, "node", SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.node.toString()),
+    add(new SolrReporter.Report(CLUSTER_GROUP, "node", SolrMetricManager.overridableRegistryName(SolrInfoBean.Group.node.toString()),
         new HashSet<String>() {{
           add("CONTAINER\\.cores\\..*");
           add("CONTAINER\\.fs\\..*");
@@ -159,6 +159,16 @@ public class SolrClusterReporter extends SolrMetricReporter {
     });
   }
 
+  public void setReport(Map map) {
+    if (map == null || map.isEmpty()) {
+      return;
+    }
+    SolrReporter.Report r = SolrReporter.Report.fromMap(map);
+    if (r != null) {
+      reports.add(r);
+    }
+  }
+
   // for unit tests
   int getPeriod() {
     return period;
@@ -170,9 +180,6 @@ public class SolrClusterReporter extends SolrMetricReporter {
 
   @Override
   protected void validate() throws IllegalStateException {
-    if (period < 1) {
-      log.info("Turning off node reporter, period=" + period);
-    }
     if (reports.isEmpty()) { // set defaults
       reports = DEFAULT_REPORTS;
     }
@@ -189,12 +196,17 @@ public class SolrClusterReporter extends SolrMetricReporter {
     if (reporter != null) {
       reporter.close();
     }
+    if (!enabled) {
+      log.info("Reporter disabled for registry " + registryName);
+      return;
+    }
     // start reporter only in cloud mode
     if (!cc.isZooKeeperAware()) {
       log.warn("Not ZK-aware, not starting...");
       return;
     }
     if (period < 1) { // don't start it
+      log.info("Turning off node reporter, period=" + period);
       return;
     }
     HttpClient httpClient = cc.getUpdateShardHandler().getHttpClient();

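A sketch of supplying an extra report through the new setReport(Map) setter. The map keys ("group", "registry", "filter") are assumptions about what SolrReporter.Report.fromMap(...) accepts.

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.solr.metrics.reporters.solr.SolrClusterReporter;

    public class ClusterReportSketch {
      public static void addHeapReport(SolrClusterReporter reporter) {
        Map<String, Object> report = new HashMap<>();
        report.put("group", "cluster");             // group id used on the receiving node
        report.put("registry", "solr\\.jvm");       // source registry name pattern
        report.put("filter", "memory\\.heap\\..*"); // metric name pattern to forward
        reporter.setReport(report);
      }
    }
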
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
index 0bca68f..1923877 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
@@ -361,7 +361,7 @@ public class SolrReporter extends ScheduledReporter {
         }
         final String effectiveGroup = group;
         MetricUtils.toSolrInputDocuments(metricManager.registry(registryName), Collections.singletonList(report.filter), MetricFilter.ALL,
-            skipHistograms, skipAggregateValues, false, metadata, doc -> {
+            MetricUtils.PropertyFilter.ALL, skipHistograms, skipAggregateValues, false, metadata, doc -> {
               doc.setField(REGISTRY_ID, registryName);
               doc.setField(GROUP_ID, effectiveGroup);
               if (effectiveLabel != null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
index 8b36d3e..0cf14db 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
@@ -98,7 +98,13 @@ public class SolrShardReporter extends SolrMetricReporter {
     if (filterConfig == null || filterConfig.isEmpty()) {
       return;
     }
-    filters = filterConfig;
+    filters.addAll(filterConfig);
+  }
+
+  public void setFilter(String filter) {
+    if (filter != null && !filter.isEmpty()) {
+      this.filters.add(filter);
+    }
   }
 
   // for unit tests
@@ -108,9 +114,6 @@ public class SolrShardReporter extends SolrMetricReporter {
 
   @Override
   protected void validate() throws IllegalStateException {
-    if (period < 1) {
-      log.info("Turning off shard reporter, period=" + period);
-    }
     if (filters.isEmpty()) {
       filters = DEFAULT_FILTERS;
     }
@@ -128,13 +131,17 @@ public class SolrShardReporter extends SolrMetricReporter {
     if (reporter != null) {
       reporter.close();
     }
+    if (!enabled) {
+      log.info("Reporter disabled for registry " + registryName);
+      return;
+    }
     if (core.getCoreDescriptor().getCloudDescriptor() == null) {
       // not a cloud core
       log.warn("Not initializing shard reporter for non-cloud core " + core.getName());
       return;
     }
     if (period < 1) { // don't start it
-      log.warn("Not starting shard reporter ");
+      log.warn("period=" + period + ", not starting shard reporter ");
       return;
     }
     // our id is coreNodeName
@@ -154,7 +161,7 @@ public class SolrShardReporter extends SolrMetricReporter {
         .cloudClient(false) // we want to send reports specifically to a selected leader instance
         .skipAggregateValues(true) // we don't want to transport details of aggregates
         .skipHistograms(true) // we don't want to transport histograms
-        .build(core.getCoreDescriptor().getCoreContainer().getUpdateShardHandler().getHttpClient(), new LeaderUrlSupplier(core));
+        .build(core.getCoreContainer().getUpdateShardHandler().getHttpClient(), new LeaderUrlSupplier(core));
 
     reporter.start(period, TimeUnit.SECONDS);
   }
@@ -172,7 +179,7 @@ public class SolrShardReporter extends SolrMetricReporter {
       if (cd == null) {
         return null;
       }
-      ClusterState state = core.getCoreDescriptor().getCoreContainer().getZkController().getClusterState();
+      ClusterState state = core.getCoreContainer().getZkController().getClusterState();
       DocCollection collection = state.getCollection(core.getCoreDescriptor().getCollectionName());
       Replica replica = collection.getLeader(core.getCoreDescriptor().getCloudDescriptor().getShardId());
       if (replica == null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/parser/QueryParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/parser/QueryParser.java b/solr/core/src/java/org/apache/solr/parser/QueryParser.java
index e846c6e..2b64b88 100644
--- a/solr/core/src/java/org/apache/solr/parser/QueryParser.java
+++ b/solr/core/src/java/org/apache/solr/parser/QueryParser.java
@@ -52,12 +52,13 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
 
   @Override
   protected Query newFieldQuery(Analyzer analyzer, String field, String queryText,
-                                boolean quoted, boolean fieldAutoGenPhraseQueries) throws SyntaxError {
+                                boolean quoted, boolean fieldAutoGenPhraseQueries, boolean fieldEnableGraphQueries)
+      throws SyntaxError {
     setAutoGenerateMultiTermSynonymsPhraseQuery(fieldAutoGenPhraseQueries || getAutoGeneratePhraseQueries());
     // Don't auto-quote graph-aware field queries 
     boolean treatAsQuoted = getSplitOnWhitespace()
         ? (quoted || fieldAutoGenPhraseQueries || getAutoGeneratePhraseQueries()) : quoted;
-    return super.newFieldQuery(analyzer, field, queryText, treatAsQuoted, false);
+    return super.newFieldQuery(analyzer, field, queryText, treatAsQuoted, false, fieldEnableGraphQueries);
   }
 
 // *   Query  ::= ( Clause )*

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/parser/QueryParser.jj
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/parser/QueryParser.jj b/solr/core/src/java/org/apache/solr/parser/QueryParser.jj
index d4d6539..c07b28d 100644
--- a/solr/core/src/java/org/apache/solr/parser/QueryParser.jj
+++ b/solr/core/src/java/org/apache/solr/parser/QueryParser.jj
@@ -76,12 +76,13 @@ public class QueryParser extends SolrQueryParserBase {
 
   @Override
   protected Query newFieldQuery(Analyzer analyzer, String field, String queryText,
-                                boolean quoted, boolean fieldAutoGenPhraseQueries) throws SyntaxError {
+                                boolean quoted, boolean fieldAutoGenPhraseQueries, boolean fieldEnableGraphQueries)
+      throws SyntaxError {
     setAutoGenerateMultiTermSynonymsPhraseQuery(fieldAutoGenPhraseQueries || getAutoGeneratePhraseQueries());
     // Don't auto-quote graph-aware field queries 
     boolean treatAsQuoted = getSplitOnWhitespace()
         ? (quoted || fieldAutoGenPhraseQueries || getAutoGeneratePhraseQueries()) : quoted;
-    return super.newFieldQuery(analyzer, field, queryText, treatAsQuoted, false);
+    return super.newFieldQuery(analyzer, field, queryText, treatAsQuoted, false, fieldEnableGraphQueries);
   }
 }
 

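Both parsers make the same change: newFieldQuery() now receives a per-field fieldEnableGraphQueries flag instead of hard-coding graph handling. Since SolrQueryParserBase extends Lucene's QueryBuilder (next diff), the flag ultimately drives QueryBuilder.setEnableGraphQueries(). A standalone sketch of that underlying switch, assuming the stock StandardAnalyzer (class, field name and query text are made up):

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.util.QueryBuilder;

    class GraphQueryToggleSketch {
      static Query parse(String text, boolean graph) {
        QueryBuilder qb = new QueryBuilder(new StandardAnalyzer());
        qb.setEnableGraphQueries(graph); // false: analyze multi-token synonyms as a flat token stream
        return qb.createBooleanQuery("body", text);
      }
    }
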
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java b/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
index 08ccdd1..dff4a4f 100644
--- a/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
+++ b/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
@@ -437,10 +437,14 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
   }
 
   protected Query newFieldQuery(Analyzer analyzer, String field, String queryText,
-                                boolean quoted, boolean fieldAutoGenPhraseQueries)  throws SyntaxError {
+                                boolean quoted, boolean fieldAutoGenPhraseQueries, boolean fieldEnableGraphQueries)
+      throws SyntaxError {
     BooleanClause.Occur occur = operator == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD;
-    return createFieldQuery(analyzer, occur, field, queryText,
+    setEnableGraphQueries(fieldEnableGraphQueries);
+    Query query = createFieldQuery(analyzer, occur, field, queryText,
         quoted || fieldAutoGenPhraseQueries || autoGeneratePhraseQueries, phraseSlop);
+    setEnableGraphQueries(true); // reset back to default
+    return query;
   }
 
 
@@ -632,8 +636,10 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
           Query subq;
           if (ft.isTokenized() && sfield.indexed()) {
             boolean fieldAutoGenPhraseQueries = ft instanceof TextField && ((TextField)ft).getAutoGeneratePhraseQueries();
+            boolean fieldEnableGraphQueries = ft instanceof TextField && ((TextField)ft).getEnableGraphQueries();
+
             subq = newFieldQuery(getAnalyzer(), sfield.getName(), rawq.getJoinedExternalVal(),
-                false, fieldAutoGenPhraseQueries);
+                false, fieldAutoGenPhraseQueries, fieldEnableGraphQueries);
             booleanBuilder.add(subq, BooleanClause.Occur.SHOULD);
           } else {
             for (String externalVal : rawq.getExternalVals()) {
@@ -949,7 +955,8 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
       // delegate to type for everything except tokenized fields
       if (ft.isTokenized() && sf.indexed()) {
         boolean fieldAutoGenPhraseQueries = ft instanceof TextField && ((TextField)ft).getAutoGeneratePhraseQueries();
-        return newFieldQuery(getAnalyzer(), field, queryText, quoted, fieldAutoGenPhraseQueries);
+        boolean fieldEnableGraphQueries = ft instanceof TextField && ((TextField)ft).getEnableGraphQueries();
+        return newFieldQuery(getAnalyzer(), field, queryText, quoted, fieldAutoGenPhraseQueries, fieldEnableGraphQueries);
       } else {
         if (raw) {
           return new RawQuery(sf, queryText);
@@ -960,7 +967,7 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
     }
 
     // default to a normal field query
-    return newFieldQuery(getAnalyzer(), field, queryText, quoted, false);
+    return newFieldQuery(getAnalyzer(), field, queryText, quoted, false, true);
   }
 
   // Assumption: quoted is always false
@@ -993,7 +1000,9 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
       if (ft.isTokenized() && sf.indexed()) {
         String queryText = queryTerms.size() == 1 ? queryTerms.get(0) : String.join(" ", queryTerms);
         boolean fieldAutoGenPhraseQueries = ft instanceof TextField && ((TextField)ft).getAutoGeneratePhraseQueries();
-        return newFieldQuery(getAnalyzer(), field, queryText, false, fieldAutoGenPhraseQueries);
+        boolean fieldEnableGraphQueries = ft instanceof TextField && ((TextField)ft).getEnableGraphQueries();
+        return newFieldQuery
+            (getAnalyzer(), field, queryText, false, fieldAutoGenPhraseQueries, fieldEnableGraphQueries);
       } else {
         if (raw) {
           return new RawQuery(sf, queryTerms);
@@ -1025,7 +1034,7 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
 
     // default to a normal field query
     String queryText = queryTerms.size() == 1 ? queryTerms.get(0) : String.join(" ", queryTerms);
-    return newFieldQuery(getAnalyzer(), field, queryText, false, false);
+    return newFieldQuery(getAnalyzer(), field, queryText, false, false, true);
   }
 
   protected boolean isRangeShouldBeProtectedFromReverse(String field, String part1){

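The base class applies the flag with a set/call/reset pattern: flip setEnableGraphQueries() to the field's setting, build the clause, then restore the default so the next clause of the same request starts clean, since the parser instance is reused across clauses. The same pattern in isolation, with a finally block added for illustration (helper name hypothetical; imports as in the previous sketch):

    static Query buildClause(QueryBuilder qb, boolean fieldEnableGraphQueries,
                             java.util.function.Supplier<Query> clause) {
      qb.setEnableGraphQueries(fieldEnableGraphQueries);
      try {
        return clause.get();
      } finally {
        qb.setEnableGraphQueries(true); // reset back to default, as above
      }
    }
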
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
index 0bfef4c..406a4ed 100644
--- a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
+++ b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
@@ -52,8 +52,9 @@ import org.apache.lucene.search.FilterCollector;
 import org.apache.lucene.search.LeafCollector;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.grouping.AllGroupHeadsCollector;
-import org.apache.lucene.search.grouping.term.TermAllGroupsCollector;
-import org.apache.lucene.search.grouping.term.TermGroupFacetCollector;
+import org.apache.lucene.search.grouping.AllGroupsCollector;
+import org.apache.lucene.search.grouping.TermGroupFacetCollector;
+import org.apache.lucene.search.grouping.TermGroupSelector;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.lucene.util.StringHelper;
@@ -330,7 +331,7 @@ public class SimpleFacets {
       );
     }
 
-    TermAllGroupsCollector collector = new TermAllGroupsCollector(groupField);
+    AllGroupsCollector collector = new AllGroupsCollector<>(new TermGroupSelector(groupField));
     Filter mainQueryFilter = docSet.getTopFilter(); // This returns a filter that only matches documents matching with q param and fq params
     Query filteredFacetQuery = new BooleanQuery.Builder()
         .add(facetQuery, Occur.MUST)

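The grouping-module refactor visible here replaces the term-specific TermAllGroupsCollector with a generic AllGroupsCollector parameterized by a GroupSelector. A self-contained sketch of counting distinct groups, assuming an already-open IndexSearcher (class name, query and field names are placeholders):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.search.grouping.AllGroupsCollector;
    import org.apache.lucene.search.grouping.TermGroupSelector;
    import org.apache.lucene.util.BytesRef;

    class GroupCountSketch {
      static int countGroups(IndexSearcher searcher, String groupField) throws java.io.IOException {
        AllGroupsCollector<BytesRef> collector =
            new AllGroupsCollector<>(new TermGroupSelector(groupField));
        searcher.search(new TermQuery(new Term("text", "solr")), collector);
        return collector.getGroupCount();
      }
    }
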
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/request/SolrRequestHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/SolrRequestHandler.java b/solr/core/src/java/org/apache/solr/request/SolrRequestHandler.java
index 82ce2e0..8350f9e 100644
--- a/solr/core/src/java/org/apache/solr/request/SolrRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/request/SolrRequestHandler.java
@@ -17,7 +17,7 @@
 package org.apache.solr.request;
 
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.response.SolrQueryResponse;
 
 /**
@@ -38,7 +38,7 @@ import org.apache.solr.response.SolrQueryResponse;
  *
  *
  */
-public interface SolrRequestHandler extends SolrInfoMBean {
+public interface SolrRequestHandler extends SolrInfoBean {
 
   /** <code>init</code> will be called just once, immediately after creation.
    * <p>The args are user-level initialization parameters that

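SolrRequestHandler's parent interface changes from SolrInfoMBean to SolrInfoBean, part of the JMX-to-metrics migration running through this merge. The contract exercised elsewhere in this commit is the name/description/category trio; a fragment consuming it (any request handler qualifies, since they all extend this interface; non-null assertions follow SolrInfoBeanTest further down):

    SolrInfoBean info = new org.apache.solr.handler.StandardRequestHandler();
    String name = info.getName();
    String desc = info.getDescription();
    SolrInfoBean.Category cat = info.getCategory();
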
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java
index 3b5ce2e..ffbbb36 100644
--- a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java
+++ b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java
@@ -33,6 +33,7 @@ import org.apache.lucene.analysis.synonym.SynonymFilterFactory;
 import org.apache.lucene.analysis.synonym.SynonymMap;
 import org.apache.lucene.analysis.util.ResourceLoader;
 import org.apache.lucene.util.CharsRef;
+import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.util.NamedList;
@@ -358,9 +359,9 @@ public class ManagedSynonymFilterFactory extends BaseManagedTokenFilterFactory {
         for (String term : cpsm.mappings.keySet()) {
           for (String mapping : cpsm.mappings.get(term)) {
             // apply the case setting to match the behavior of the SynonymMap builder
-            String casedTerm = synonymManager.applyCaseSetting(ignoreCase, term);
-            String casedMapping = synonymManager.applyCaseSetting(ignoreCase, mapping);
-            add(new CharsRef(casedTerm), new CharsRef(casedMapping), false);
+            CharsRef casedTerm = analyze(synonymManager.applyCaseSetting(ignoreCase, term), new CharsRefBuilder());
+            CharsRef casedMapping = analyze(synonymManager.applyCaseSetting(ignoreCase, mapping), new CharsRefBuilder());
+            add(casedTerm, casedMapping, false);
           }          
         }
       }      

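The managed-synonym fix stops registering raw strings and instead runs both sides of each mapping through analyze(), inherited from SynonymMap.Parser, so multi-token or case-folded entries end up in the same analyzed form the file-based SynonymMap builder would produce. The core of the change in isolation (example strings are hypothetical; this runs inside a SynonymMap.Parser subclass, and each call gets its own CharsRefBuilder because analyze() returns a view over the builder's buffer):

    CharsRef input  = analyze(synonymManager.applyCaseSetting(ignoreCase, "Wi Fi"), new CharsRefBuilder());
    CharsRef output = analyze(synonymManager.applyCaseSetting(ignoreCase, "wireless"), new CharsRefBuilder());
    add(input, output, false); // includeOrig=false, matching the call above
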
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/schema/CurrencyField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/CurrencyField.java b/solr/core/src/java/org/apache/solr/schema/CurrencyField.java
index 7b27c3f..286d2c1 100644
--- a/solr/core/src/java/org/apache/solr/schema/CurrencyField.java
+++ b/solr/core/src/java/org/apache/solr/schema/CurrencyField.java
@@ -272,7 +272,8 @@ public class CurrencyField extends FieldType implements SchemaAware, ResourceLoa
    */
   public RawCurrencyValueSource getValueSource(SchemaField field, 
                                                QParser parser) {
-    field.checkFieldCacheSource();
+    getAmountField(field).checkFieldCacheSource();
+    getCurrencyField(field).checkFieldCacheSource();
     return new RawCurrencyValueSource(field, defaultCurrency, parser);
   }
 
@@ -488,8 +489,8 @@ public class CurrencyField extends FieldType implements SchemaAware, ResourceLoa
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Currency code not supported by this JVM: " + targetCurrencyCode);
       }
 
-      SchemaField amountField = schema.getField(sf.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_AMOUNT_RAW);
-      SchemaField currencyField = schema.getField(sf.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_CURRENCY);
+      SchemaField amountField = getAmountField(sf);
+      SchemaField currencyField = getCurrencyField(sf);
 
       currencyValues = currencyField.getType().getValueSource(currencyField, parser);
       amountValues = amountField.getType().getValueSource(amountField, parser);

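getValueSource() previously ran checkFieldCacheSource() against the poly-field itself; it now checks the two dynamic subfields that actually back it, which is where the indexed/docValues properties live. The helpers it calls are presumably shaped like the lookups this hunk removes:

    private SchemaField getAmountField(SchemaField field) {
      return schema.getField(field.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_AMOUNT_RAW);
    }

    private SchemaField getCurrencyField(SchemaField field) {
      return schema.getField(field.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_CURRENCY);
    }
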
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/schema/DatePointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/DatePointField.java b/solr/core/src/java/org/apache/solr/schema/DatePointField.java
index 50f85e3..ea81ea3 100644
--- a/solr/core/src/java/org/apache/solr/schema/DatePointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/DatePointField.java
@@ -160,6 +160,9 @@ public class DatePointField extends PointField implements DateValueFieldType {
   @Override
   public Query getSetQuery(QParser parser, SchemaField field, Collection<String> externalVals) {
     assert externalVals.size() > 0;
+    if (!field.indexed()) {
+      return super.getSetQuery(parser, field, externalVals);
+    }
     long[] values = new long[externalVals.size()];
     int i = 0;
     for (String val:externalVals) {
@@ -203,7 +206,7 @@ public class DatePointField extends PointField implements DateValueFieldType {
   @Override
   public UninvertingReader.Type getUninversionType(SchemaField sf) {
     if (sf.multiValued()) {
-      return UninvertingReader.Type.SORTED_LONG;
+      return null;
     } else {
       return UninvertingReader.Type.LONG_POINT;
     }
@@ -222,8 +225,6 @@ public class DatePointField extends PointField implements DateValueFieldType {
 
   @Override
   public IndexableField createField(SchemaField field, Object value) {
-    if (!isFieldUsed(field)) return null;
-
     Date date = (value instanceof Date)
         ? ((Date)value)
         : DateMathParser.parseMath(null, value.toString());
@@ -266,4 +267,4 @@ class DatePointFieldSource extends LongFieldSource {
   public long externalToLong(String extVal) {
     return DateMathParser.parseMath(null, extVal).getTime();
   }
-}
\ No newline at end of file
+}

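Two behavioral changes here: getSetQuery() now falls back to the superclass implementation when the field is not indexed (the long[]-based path below it requires point index structures), and multi-valued date points report no uninversion type, since point encodings cannot be uninverted the way indexed terms can. The indexed path reduces to Lucene's set query over points; a one-line sketch (field name and epoch values are made up):

    Query q = org.apache.lucene.document.LongPoint.newSetQuery(
        "eventDate_pdt", 1492646400000L, 1492732800000L);
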
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/schema/DoublePointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/DoublePointField.java b/solr/core/src/java/org/apache/solr/schema/DoublePointField.java
index 6ae8349..05a1ce7 100644
--- a/solr/core/src/java/org/apache/solr/schema/DoublePointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/DoublePointField.java
@@ -106,6 +106,9 @@ public class DoublePointField extends PointField implements DoubleValueFieldType
   @Override
   public Query getSetQuery(QParser parser, SchemaField field, Collection<String> externalVal) {
     assert externalVal.size() > 0;
+    if (!field.indexed()) {
+      return super.getSetQuery(parser, field, externalVal);
+    }
     double[] values = new double[externalVal.size()];
     int i = 0;
     for (String val:externalVal) {
@@ -148,7 +151,7 @@ public class DoublePointField extends PointField implements DoubleValueFieldType
   @Override
   public Type getUninversionType(SchemaField sf) {
     if (sf.multiValued()) {
-      return Type.SORTED_DOUBLE;
+      return null;
     } else {
       return Type.DOUBLE_POINT;
     }
@@ -167,8 +170,6 @@ public class DoublePointField extends PointField implements DoubleValueFieldType
 
   @Override
   public IndexableField createField(SchemaField field, Object value) {
-    if (!isFieldUsed(field)) return null;
-
     double doubleValue = (value instanceof Number) ? ((Number) value).doubleValue() : Double.parseDouble(value.toString());
     return new DoublePoint(field.getName(), doubleValue);
   }


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/CursorPagingTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/CursorPagingTest.java b/solr/core/src/test/org/apache/solr/CursorPagingTest.java
index b204677..eb1c6bc 100644
--- a/solr/core/src/test/org/apache/solr/CursorPagingTest.java
+++ b/solr/core/src/test/org/apache/solr/CursorPagingTest.java
@@ -19,7 +19,6 @@ package org.apache.solr;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.SentinelIntSet;
 import org.apache.lucene.util.mutable.MutableValueInt;
-import org.apache.solr.core.SolrInfoMBean;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.CursorMarkParams;
 import org.apache.solr.common.params.SolrParams;
@@ -32,6 +31,7 @@ import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.search.CursorMark; //jdoc
 import org.noggit.ObjectBuilder;
@@ -521,16 +521,16 @@ public class CursorPagingTest extends SolrTestCaseJ4 {
 
     final Collection<String> allFieldNames = getAllSortFieldNames();
 
-    final SolrInfoMBean filterCacheStats 
-      = h.getCore().getInfoRegistry().get("filterCache");
+    final MetricsMap filterCacheStats =
+        (MetricsMap)h.getCore().getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.filterCache");
     assertNotNull(filterCacheStats);
-    final SolrInfoMBean queryCacheStats 
-      = h.getCore().getInfoRegistry().get("queryResultCache");
+    final MetricsMap queryCacheStats =
+        (MetricsMap)h.getCore().getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.queryResultCache");
     assertNotNull(queryCacheStats);
 
-    final long preQcIn = (Long) queryCacheStats.getStatistics().get("inserts");
-    final long preFcIn = (Long) filterCacheStats.getStatistics().get("inserts");
-    final long preFcHits = (Long) filterCacheStats.getStatistics().get("hits");
+    final long preQcIn = (Long) queryCacheStats.getValue().get("inserts");
+    final long preFcIn = (Long) filterCacheStats.getValue().get("inserts");
+    final long preFcHits = (Long) filterCacheStats.getValue().get("hits");
 
     SentinelIntSet ids = assertFullWalkNoDups
       (10, params("q", "*:*",
@@ -542,9 +542,9 @@ public class CursorPagingTest extends SolrTestCaseJ4 {
     
     assertEquals(6, ids.size());
 
-    final long postQcIn = (Long) queryCacheStats.getStatistics().get("inserts");
-    final long postFcIn = (Long) filterCacheStats.getStatistics().get("inserts");
-    final long postFcHits = (Long) filterCacheStats.getStatistics().get("hits");
+    final long postQcIn = (Long) queryCacheStats.getValue().get("inserts");
+    final long postFcIn = (Long) filterCacheStats.getValue().get("inserts");
+    final long postFcHits = (Long) filterCacheStats.getValue().get("hits");
     
     assertEquals("query cache inserts changed", preQcIn, postQcIn);
     // NOTE: use of pure negative filters causees "*:* to be tracked in filterCache

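The stats migration here maps the old SolrInfoMBean idioms onto the metrics registry: infoRegistry.get("filterCache") becomes a "CACHE.searcher.filterCache" lookup, and getStatistics() becomes MetricsMap.getValue(), which returns the same insert/hit counters as a Map. Condensed, using the test harness `h` as above:

    MetricsMap filterCache = (MetricsMap) h.getCore().getCoreMetricManager()
        .getRegistry().getMetrics().get("CACHE.searcher.filterCache");
    long hits = (Long) filterCache.getValue().get("hits");
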
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java b/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java
new file mode 100644
index 0000000..d39c87f
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr;
+
+import org.apache.lucene.util.TestUtil;
+import org.apache.solr.core.SolrInfoBean;
+import org.apache.solr.handler.StandardRequestHandler;
+import org.apache.solr.handler.admin.LukeRequestHandler;
+import org.apache.solr.handler.component.SearchComponent;
+import org.apache.solr.highlight.DefaultSolrHighlighter;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.search.LRUCache;
+import org.junit.BeforeClass;
+import java.io.File;
+import java.net.URI;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.List;
+
+/**
+ * A simple test used to increase code coverage for some standard things...
+ */
+public class SolrInfoBeanTest extends SolrTestCaseJ4
+{
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore("solrconfig.xml","schema.xml");
+  }
+
+  /**
+   * Gets a list of everything we can find in the classpath and makes sure it has
+   * a name, description, etc...
+   */
+  public void testCallMBeanInfo() throws Exception {
+    List<Class> classes = new ArrayList<>();
+    classes.addAll(getClassesForPackage(StandardRequestHandler.class.getPackage().getName()));
+    classes.addAll(getClassesForPackage(SearchComponent.class.getPackage().getName()));
+    classes.addAll(getClassesForPackage(LukeRequestHandler.class.getPackage().getName()));
+    classes.addAll(getClassesForPackage(DefaultSolrHighlighter.class.getPackage().getName()));
+    classes.addAll(getClassesForPackage(LRUCache.class.getPackage().getName()));
+   // System.out.println(classes);
+    
+    int checked = 0;
+    SolrMetricManager metricManager = h.getCoreContainer().getMetricManager();
+    String registry = h.getCore().getCoreMetricManager().getRegistryName();
+    String scope = TestUtil.randomSimpleString(random(), 2, 10);
+    for( Class clazz : classes ) {
+      if( SolrInfoBean.class.isAssignableFrom( clazz ) ) {
+        try {
+          SolrInfoBean info = (SolrInfoBean)clazz.newInstance();
+          if (info instanceof SolrMetricProducer) {
+            ((SolrMetricProducer)info).initializeMetrics(metricManager, registry, scope);
+          }
+          
+          //System.out.println( info.getClass() );
+          assertNotNull( info.getName() );
+          assertNotNull( info.getDescription() );
+          assertNotNull( info.getCategory() );
+          
+          if( info instanceof LRUCache ) {
+            continue;
+          }
+          
+          assertNotNull( info.toString() );
+          checked++;
+        }
+        catch( InstantiationException ex ) {
+          // expected...
+          //System.out.println( "unable to initialize: "+clazz );
+        }
+      }
+    }
+    assertTrue( "there are at least 10 SolrInfoBean that should be found in the classpath, found " + checked, checked > 10 );
+  }
+  
+  private static List<Class> getClassesForPackage(String pckgname) throws Exception {
+    ArrayList<File> directories = new ArrayList<>();
+    ClassLoader cld = h.getCore().getResourceLoader().getClassLoader();
+    String path = pckgname.replace('.', '/');
+    Enumeration<URL> resources = cld.getResources(path);
+    while (resources.hasMoreElements()) {
+      final URI uri = resources.nextElement().toURI();
+      if (!"file".equalsIgnoreCase(uri.getScheme()))
+        continue;
+      final File f = new File(uri);
+      directories.add(f);
+    }
+      
+    ArrayList<Class> classes = new ArrayList<>();
+    for (File directory : directories) {
+      if (directory.exists()) {
+        String[] files = directory.list();
+        for (String file : files) {
+          if (file.endsWith(".class")) {
+             String clazzName = file.substring(0, file.length() - 6);
+             // exclude Test classes that happen to be in these packages.
+             // class.ForName'ing some of them can cause trouble.
+             if (!clazzName.endsWith("Test") && !clazzName.startsWith("Test")) {
+               classes.add(Class.forName(pckgname + '.' + clazzName));
+             }
+          }
+        }
+      }
+    }
+    assertFalse("No classes found in package '"+pckgname+"'; maybe your test classes are packaged as JAR file?", classes.isEmpty());
+    return classes;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/SolrInfoMBeanTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/SolrInfoMBeanTest.java b/solr/core/src/test/org/apache/solr/SolrInfoMBeanTest.java
deleted file mode 100644
index bfe2316..0000000
--- a/solr/core/src/test/org/apache/solr/SolrInfoMBeanTest.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr;
-
-import org.apache.solr.core.SolrInfoMBean;
-import org.apache.solr.handler.StandardRequestHandler;
-import org.apache.solr.handler.admin.LukeRequestHandler;
-import org.apache.solr.handler.component.SearchComponent;
-import org.apache.solr.highlight.DefaultSolrHighlighter;
-import org.apache.solr.search.LRUCache;
-import org.junit.BeforeClass;
-import java.io.File;
-import java.net.URI;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Enumeration;
-import java.util.List;
-
-/**
- * A simple test used to increase code coverage for some standard things...
- */
-public class SolrInfoMBeanTest extends SolrTestCaseJ4
-{
-  @BeforeClass
-  public static void beforeClass() throws Exception {
-    initCore("solrconfig.xml","schema.xml");
-  }
-
-  /**
-   * Gets a list of everything we can find in the classpath and makes sure it has
-   * a name, description, etc...
-   */
-  public void testCallMBeanInfo() throws Exception {
-    List<Class> classes = new ArrayList<>();
-    classes.addAll(getClassesForPackage(StandardRequestHandler.class.getPackage().getName()));
-    classes.addAll(getClassesForPackage(SearchComponent.class.getPackage().getName()));
-    classes.addAll(getClassesForPackage(LukeRequestHandler.class.getPackage().getName()));
-    classes.addAll(getClassesForPackage(DefaultSolrHighlighter.class.getPackage().getName()));
-    classes.addAll(getClassesForPackage(LRUCache.class.getPackage().getName()));
-   // System.out.println(classes);
-    
-    int checked = 0;
-    for( Class clazz : classes ) {
-      if( SolrInfoMBean.class.isAssignableFrom( clazz ) ) {
-        try {
-          SolrInfoMBean info = (SolrInfoMBean)clazz.newInstance();
-          
-          //System.out.println( info.getClass() );
-          assertNotNull( info.getName() );
-          assertNotNull( info.getDescription() );
-          assertNotNull( info.getCategory() );
-          
-          if( info instanceof LRUCache ) {
-            continue;
-          }
-          
-          assertNotNull( info.toString() );
-          // increase code coverage...
-          assertNotNull( info.getDocs() + "" );
-          assertNotNull( info.getStatistics()+"" );
-          checked++;
-        }
-        catch( InstantiationException ex ) {
-          // expected...
-          //System.out.println( "unable to initialize: "+clazz );
-        }
-      }
-    }
-    assertTrue( "there are at least 10 SolrInfoMBean that should be found in the classpath, found " + checked, checked > 10 );
-  }
-  
-  private static List<Class> getClassesForPackage(String pckgname) throws Exception {
-    ArrayList<File> directories = new ArrayList<>();
-    ClassLoader cld = h.getCore().getResourceLoader().getClassLoader();
-    String path = pckgname.replace('.', '/');
-    Enumeration<URL> resources = cld.getResources(path);
-    while (resources.hasMoreElements()) {
-      final URI uri = resources.nextElement().toURI();
-      if (!"file".equalsIgnoreCase(uri.getScheme()))
-        continue;
-      final File f = new File(uri);
-      directories.add(f);
-    }
-      
-    ArrayList<Class> classes = new ArrayList<>();
-    for (File directory : directories) {
-      if (directory.exists()) {
-        String[] files = directory.list();
-        for (String file : files) {
-          if (file.endsWith(".class")) {
-             String clazzName = file.substring(0, file.length() - 6);
-             // exclude Test classes that happen to be in these packages.
-             // class.ForName'ing some of them can cause trouble.
-             if (!clazzName.endsWith("Test") && !clazzName.startsWith("Test")) {
-               classes.add(Class.forName(pckgname + '.' + clazzName));
-             }
-          }
-        }
-      }
-    }
-    assertFalse("No classes found in package '"+pckgname+"'; maybe your test classes are packaged as JAR file?", classes.isEmpty());
-    return classes;
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
index e659727..2d46551 100644
--- a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
+++ b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
@@ -838,7 +838,7 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
         Object realResponse = ObjectBuilder.fromJSON(strResponse);
         String err = JSONTestUtil.matchObj("/grouped/" + groupField, realResponse, modelResponse);
         if (err != null) {
-          log.error("GROUPING MISMATCH: " + err
+          log.error("GROUPING MISMATCH (" + queryIter + "): " + err
            + "\n\trequest="+req
            + "\n\tresult="+strResponse
            + "\n\texpected="+ JSONUtil.toJSON(modelResponse)
@@ -854,7 +854,7 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
         // assert post / pre grouping facets
         err = JSONTestUtil.matchObj("/facet_counts/facet_fields/"+FOO_STRING_FIELD, realResponse, expectedFacetResponse);
         if (err != null) {
-          log.error("GROUPING MISMATCH: " + err
+          log.error("GROUPING MISMATCH (" + queryIter + "): " + err
            + "\n\trequest="+req
            + "\n\tresult="+strResponse
            + "\n\texpected="+ JSONUtil.toJSON(expectedFacetResponse)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java
index 6ca072b..869650df 100644
--- a/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java
@@ -57,6 +57,10 @@ public class AliasIntegrationTest extends SolrCloudTestCase {
 
     CollectionAdminRequest.createAlias("testalias", "collection1").process(cluster.getSolrClient());
 
+    // ensure that the alias has been registered
+    assertEquals("collection1",
+        new CollectionAdminRequest.ListAliases().process(cluster.getSolrClient()).getAliases().get("testalias"));
+
     // search for alias
     QueryResponse res = cluster.getSolrClient().query("testalias", new SolrQuery("*:*"));
     assertEquals(3, res.getResults().getNumFound());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
index d1dbe9c..1c23c9c 100644
--- a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
@@ -59,6 +59,7 @@ import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
+import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -752,19 +753,28 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
 
   private Long getNumCommits(HttpSolrClient sourceClient) throws
       SolrServerException, IOException {
-    try (HttpSolrClient client = getHttpSolrClient(sourceClient.getBaseURL())) {
+    // construct the /admin/metrics URL
+    URL url = new URL(sourceClient.getBaseURL());
+    String path = url.getPath().substring(1);
+    String[] elements = path.split("/");
+    String collection = elements[elements.length - 1];
+    String urlString = url.toString();
+    urlString = urlString.substring(0, urlString.length() - collection.length() - 1);
+    try (HttpSolrClient client = getHttpSolrClient(urlString)) {
       client.setConnectionTimeout(15000);
       client.setSoTimeout(60000);
       ModifiableSolrParams params = new ModifiableSolrParams();
-      params.set("qt", "/admin/mbeans?key=updateHandler&stats=true");
+      //params.set("qt", "/admin/metrics?prefix=UPDATE.updateHandler&registry=solr.core." + collection);
+      params.set("qt", "/admin/metrics");
+      params.set("prefix", "UPDATE.updateHandler");
+      params.set("registry", "solr.core." + collection);
       // use generic request to avoid extra processing of queries
       QueryRequest req = new QueryRequest(params);
       NamedList<Object> resp = client.request(req);
-      NamedList mbeans = (NamedList) resp.get("solr-mbeans");
-      NamedList uhandlerCat = (NamedList) mbeans.get("UPDATE");
-      NamedList uhandler = (NamedList) uhandlerCat.get("updateHandler");
-      NamedList stats = (NamedList) uhandler.get("stats");
-      return (Long) stats.get("commits");
+      NamedList metrics = (NamedList) resp.get("metrics");
+      NamedList uhandlerCat = (NamedList) metrics.getVal(0);
+      Map<String,Object> commits = (Map<String,Object>) uhandlerCat.get("UPDATE.updateHandler.commits");
+      return (Long) commits.get("count");
     }
   }
 

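This replaces the /admin/mbeans scrape with the /admin/metrics handler; the URL surgery above exists because that handler lives at the node root rather than under a collection, so the collection suffix must be stripped from the client's base URL. The equivalent direct request, as the commented-out line suggests (host and collection are placeholders):

    http://localhost:8983/solr/admin/metrics?prefix=UPDATE.updateHandler&registry=solr.core.collection1
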
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/cloud/BasicZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/BasicZkTest.java b/solr/core/src/test/org/apache/solr/cloud/BasicZkTest.java
index 26fa325..f48f76b 100644
--- a/solr/core/src/test/org/apache/solr/cloud/BasicZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/BasicZkTest.java
@@ -16,11 +16,14 @@
  */
 package org.apache.solr.cloud;
 
+import java.util.Map;
+
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Metric;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
@@ -158,11 +161,11 @@ public class BasicZkTest extends AbstractZkTestCase {
     }
     
     // test stats call
-    NamedList stats = core.getStatistics();
-    assertEquals("collection1", stats.get("coreName"));
-    assertEquals("collection1", stats.get("collection"));
-    assertEquals("shard1", stats.get("shard"));
-    assertTrue(stats.get("refCount") != null);
+    Map<String, Metric> metrics = h.getCore().getCoreMetricManager().getRegistry().getMetrics();
+    assertEquals("collection1", ((Gauge)metrics.get("CORE.coreName")).getValue());
+    assertEquals("collection1", ((Gauge)metrics.get("CORE.collection")).getValue());
+    assertEquals("shard1", ((Gauge)metrics.get("CORE.shard")).getValue());
+    assertTrue(metrics.get("CORE.refCount") != null);
 
     //zkController.getZkClient().printLayoutToStdOut();
   }

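Core-level stats move from NamedList entries to registered dropwizard gauges under CORE.* names. Reading one back looks like this (cast shape assumed from the assertions above):

    @SuppressWarnings("unchecked")
    Gauge<String> shard = (Gauge<String>) metrics.get("CORE.shard");
    String shardId = shard.getValue(); // "shard1" in this test
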
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java
new file mode 100644
index 0000000..e0cf3f7
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java
@@ -0,0 +1,233 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.Utils;
+
+public class ClusterStateMockUtil {
+
+  private final static Pattern BLUEPRINT = Pattern.compile("([a-z])(\\d+)?(?:(['A','R','D','F']))?(\\*)?");
+
+  protected static class Result implements Closeable {
+    OverseerAutoReplicaFailoverThread.DownReplica badReplica;
+    ZkStateReader reader;
+
+    @Override
+    public void close() throws IOException {
+      reader.close();
+    }
+  }
+
+  protected static ClusterStateMockUtil.Result buildClusterState(List<Result> results, String string, String ... liveNodes) {
+    return buildClusterState(results, string, 1, liveNodes);
+  }
+
+  protected static ClusterStateMockUtil.Result buildClusterState(List<Result> results, String string, int replicationFactor, String ... liveNodes) {
+    return buildClusterState(results, string, replicationFactor, 10, liveNodes);
+  }
+
+  /**
+   * This method lets you construct a complex ClusterState object by using simple strings of letters.
+   *
+   * c = collection, s = slice, r = replica, \d = node number (r2 means the replica is on node 2),
+   * state = [A,R,D,F], * = replica to replace, binds to the left.
+   *
+   * For example:
+   * csrr2rD*sr2csr
+   *
+   * Creates:
+   *
+   * 'csrr2rD*'
+   * A collection, a shard, a replica on node 1 (the default) that is active (the default), a replica on node 2, and a replica on node 1
+   * that has a state of down and is the replica we will be looking to put somewhere else (the *).
+   *
+   * 'sr2'
+   * Then, another shard that has a replica on node 2.
+   *
+   * 'csr'
+   * Then, another collection that has a shard with a single active replica on node 1.
+   *
+   * Result:
+   *        {
+   *         "collection2":{
+   *           "maxShardsPerNode":"1",
+   *           "replicationFactor":"1",
+   *           "shards":{"slice1":{
+   *               "state":"active",
+   *               "replicas":{"replica5":{
+   *                   "state":"active",
+   *                   "node_name":"baseUrl1_",
+   *                   "base_url":"http://baseUrl1"}}}}},
+   *         "collection1":{
+   *           "maxShardsPerNode":"1",
+   *           "replicationFactor":"1",
+   *           "shards":{
+   *             "slice1":{
+   *               "state":"active",
+   *               "replicas":{
+   *                 "replica3 (bad)":{
+   *                   "state":"down",
+   *                   "node_name":"baseUrl1_",
+   *                   "base_url":"http://baseUrl1"},
+   *                 "replica2":{
+   *                   "state":"active",
+   *                   "node_name":"baseUrl2_",
+   *                   "base_url":"http://baseUrl2"},
+   *                 "replica1":{
+   *                   "state":"active",
+   *                   "node_name":"baseUrl1_",
+   *                   "base_url":"http://baseUrl1"}}},
+   *             "slice2":{
+   *               "state":"active",
+   *               "replicas":{"replica4":{
+   *                   "state":"active",
+   *                   "node_name":"baseUrl2_",
+   *                   "base_url":"http://baseUrl2"}}}}}}
+   *
+   */
+  @SuppressWarnings("resource")
+  protected static ClusterStateMockUtil.Result buildClusterState(List<Result> results, String clusterDescription, int replicationFactor, int maxShardsPerNode, String ... liveNodes) {
+    ClusterStateMockUtil.Result result = new ClusterStateMockUtil.Result();
+
+    Map<String,Slice> slices = null;
+    Map<String,Replica> replicas = null;
+    Map<String,Object> collectionProps = new HashMap<>();
+    collectionProps.put(ZkStateReader.MAX_SHARDS_PER_NODE, Integer.toString(maxShardsPerNode));
+    collectionProps.put(ZkStateReader.REPLICATION_FACTOR, Integer.toString(replicationFactor));
+    Map<String,DocCollection> collectionStates = new HashMap<>();
+    DocCollection docCollection = null;
+    Slice slice = null;
+    int replicaCount = 1;
+
+    Matcher m = BLUEPRINT.matcher(clusterDescription);
+    while (m.find()) {
+      Replica replica;
+      switch (m.group(1)) {
+        case "c":
+          slices = new HashMap<>();
+          docCollection = new DocCollection("collection" + (collectionStates.size() + 1), slices, collectionProps, null);
+          collectionStates.put(docCollection.getName(), docCollection);
+          break;
+        case "s":
+          replicas = new HashMap<>();
+          slice = new Slice("slice" + (slices.size() + 1), replicas, null);
+          slices.put(slice.getName(), slice);
+          break;
+        case "r":
+          Map<String,Object> replicaPropMap = new HashMap<>();
+          String node;
+
+          node = m.group(2);
+
+          if (node == null || node.trim().length() == 0) {
+            node = "1";
+          }
+
+          Replica.State state = Replica.State.ACTIVE;
+          String stateCode = m.group(3);
+
+          if (stateCode != null) {
+            switch (stateCode.charAt(0)) {
+              case 'S':
+                state = Replica.State.ACTIVE;
+                break;
+              case 'R':
+                state = Replica.State.RECOVERING;
+                break;
+              case 'D':
+                state = Replica.State.DOWN;
+                break;
+              case 'F':
+                state = Replica.State.RECOVERY_FAILED;
+                break;
+              default:
+                throw new IllegalArgumentException(
+                    "Unexpected state for replica: " + stateCode);
+            }
+          }
+
+          String nodeName = "baseUrl" + node + "_";
+          String replicaName = "replica" + replicaCount++;
+
+          if ("*".equals(m.group(4))) {
+            replicaName += " (bad)";
+          }
+
+          replicaPropMap.put(ZkStateReader.NODE_NAME_PROP, nodeName);
+          replicaPropMap.put(ZkStateReader.BASE_URL_PROP, "http://baseUrl" + node);
+          replicaPropMap.put(ZkStateReader.STATE_PROP, state.toString());
+
+          replica = new Replica(replicaName, replicaPropMap);
+
+          if ("*".equals(m.group(4))) {
+            result.badReplica = new OverseerAutoReplicaFailoverThread.DownReplica();
+            result.badReplica.replica = replica;
+            result.badReplica.slice = slice;
+            result.badReplica.collection = docCollection;
+          }
+
+          replicas.put(replica.getName(), replica);
+          break;
+        default:
+          break;
+      }
+    }
+
+    ClusterState clusterState = new ClusterState(1, new HashSet<>(Arrays.asList(liveNodes)), collectionStates);
+    MockZkStateReader reader = new MockZkStateReader(clusterState, collectionStates.keySet());
+
+    String json;
+    try {
+      json = new String(Utils.toJSON(clusterState), "UTF-8");
+    } catch (UnsupportedEncodingException e) {
+      throw new RuntimeException("Unexpected");
+    }
+    System.err.println(json);
+
+    // todo remove the limitation of always having a bad replica
+    assert result.badReplica != null : "Is there no bad replica?";
+    assert result.badReplica.slice != null : "Is there no bad replica?";
+
+    result.reader = reader;
+
+    if (results != null) {
+      results.add(result);
+    }
+
+    return result;
+  }
+
+
+}

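A hypothetical caller for the blueprint DSL documented in the javadoc above (the methods are protected static, so this assumes a test in the same package; node names follow the builder's "baseUrl<N>_" convention, and Result is Closeable because it owns the mock ZkStateReader):

    static void demo() throws java.io.IOException {
      try (ClusterStateMockUtil.Result r = ClusterStateMockUtil.buildClusterState(
          null, "csrr2rD*sr2csr", "baseUrl1_", "baseUrl2_")) {
        ClusterState state = r.reader.getClusterState();
        // r.badReplica holds the down replica flagged with '*'
      }
    }
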
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java
index dcb115a..30c3c9e 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java
@@ -29,6 +29,7 @@ import org.apache.solr.client.solrj.request.CollectionAdminRequest.SplitShard;
 import org.apache.solr.client.solrj.response.RequestStatusState;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -178,11 +179,22 @@ public class CollectionsAPIAsyncDistributedZkTest extends SolrCloudTestCase {
       //expected
     }
 
-    String replica = shard1.getReplicas().iterator().next().getName();
+    Replica replica = shard1.getReplicas().iterator().next();
+    for (String liveNode : client.getZkStateReader().getClusterState().getLiveNodes()) {
+      if (!replica.getNodeName().equals(liveNode)) {
+        state = new CollectionAdminRequest.MoveReplica(collection, replica.getName(), liveNode)
+            .processAndWait(client, MAX_TIMEOUT_SECONDS);
+        assertSame("MoveReplica did not complete", RequestStatusState.COMPLETED, state);
+        break;
+      }
+    }
+
+    shard1 = client.getZkStateReader().getClusterState().getSlice(collection, "shard1");
+    String replicaName = shard1.getReplicas().iterator().next().getName();
     state = new CollectionAdminRequest.DeleteReplica()
         .setCollectionName(collection)
         .setShardName("shard1")
-        .setReplica(replica)
+        .setReplica(replicaName)
         .processAndWait(client, MAX_TIMEOUT_SECONDS);
     assertSame("DeleteReplica did not complete", RequestStatusState.COMPLETED, state);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
index 7925358..ed9ed41 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
@@ -20,6 +20,7 @@ import javax.management.MBeanServer;
 import javax.management.MBeanServerFactory;
 import javax.management.ObjectName;
 import java.io.IOException;
+import java.lang.invoke.MethodHandles;
 import java.lang.management.ManagementFactory;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -37,6 +38,7 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 import com.google.common.collect.ImmutableList;
+import org.apache.commons.io.IOUtils;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.client.solrj.SolrClient;
@@ -68,12 +70,14 @@ import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean.Category;
+import org.apache.solr.core.SolrInfoBean.Category;
 import org.apache.solr.util.TestInjection;
 import org.apache.solr.util.TimeOut;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
@@ -83,6 +87,7 @@ import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
  */
 @Slow
 public class CollectionsAPIDistributedZkTest extends SolrCloudTestCase {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @BeforeClass
   public static void beforeCollectionsAPIDistributedZkTest() {
@@ -94,9 +99,11 @@ public class CollectionsAPIDistributedZkTest extends SolrCloudTestCase {
 
   @BeforeClass
   public static void setupCluster() throws Exception {
+    String solrXml = IOUtils.toString(CollectionsAPIDistributedZkTest.class.getResourceAsStream("/solr/solr-jmxreporter.xml"), "UTF-8");
     configureCluster(4)
         .addConfig("conf", configset("cloud-minimal"))
         .addConfig("conf2", configset("cloud-minimal-jmx"))
+        .withSolrXml(solrXml)
         .configure();
   }
 
@@ -549,7 +556,7 @@ public class CollectionsAPIDistributedZkTest extends SolrCloudTestCase {
     for (SolrCore core : theCores) {
 
       // look for core props file
-      Path instancedir = (Path) core.getStatistics().get("instanceDir");
+      Path instancedir = (Path) core.getResourceLoader().getInstancePath();
       assertTrue("Could not find expected core.properties file", Files.exists(instancedir.resolve("core.properties")));
 
       Path expected = Paths.get(jetty.getSolrHome()).toAbsolutePath().resolve(core.getName());
@@ -620,25 +627,22 @@ public class CollectionsAPIDistributedZkTest extends SolrCloudTestCase {
       Set<ObjectName> mbeans = new HashSet<>();
       mbeans.addAll(server.queryNames(null, null));
       for (final ObjectName mbean : mbeans) {
-        Object value;
-        Object indexDir;
-        Object name;
 
         try {
-          if (((value = server.getAttribute(mbean, "category")) != null && value
-              .toString().equals(Category.CORE.toString()))
-              && ((indexDir = server.getAttribute(mbean, "coreName")) != null)
-              && ((indexDir = server.getAttribute(mbean, "indexDir")) != null)
-              && ((name = server.getAttribute(mbean, "name")) != null)) {
-            if (!indexDirToShardNamesMap.containsKey(indexDir.toString())) {
-              indexDirToShardNamesMap.put(indexDir.toString(),
-                  new HashSet<String>());
+          Map<String, String> props = mbean.getKeyPropertyList();
+          String category = props.get("category");
+          String name = props.get("name");
+          if ((category != null && category.toString().equals(Category.CORE.toString())) &&
+              (name != null && name.equals("indexDir"))) {
+            String indexDir = server.getAttribute(mbean, "Value").toString();
+            String key = props.get("dom2") + "." + props.get("dom3") + "." + props.get("dom4");
+            if (!indexDirToShardNamesMap.containsKey(indexDir)) {
+              indexDirToShardNamesMap.put(indexDir.toString(), new HashSet<>());
             }
-            indexDirToShardNamesMap.get(indexDir.toString()).add(
-                name.toString());
+            indexDirToShardNamesMap.get(indexDir.toString()).add(key);
           }
         } catch (Exception e) {
-          // ignore, just continue - probably a "category" or "source" attribute
+          // ignore, just continue - probably a "Value" attribute
           // not found
         }
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java b/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java
index b6754c7..d2d6a16 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java
@@ -113,13 +113,15 @@ public class DistributedQueueTest extends SolrTestCaseJ4 {
 
     // After draining the queue, a watcher should be set.
     assertNull(dq.peek(100));
-    assertTrue(dq.hasWatcher());
+    assertFalse(dq.isDirty());
+    assertEquals(1, dq.watcherCount());
 
     forceSessionExpire();
 
     // Session expiry should have fired the watcher.
     Thread.sleep(100);
-    assertFalse(dq.hasWatcher());
+    assertTrue(dq.isDirty());
+    assertEquals(0, dq.watcherCount());
 
     // Rerun the earlier test make sure updates are still seen, post reconnection.
     future = executor.submit(() -> new String(dq.peek(true), UTF8));
@@ -138,6 +140,50 @@ public class DistributedQueueTest extends SolrTestCaseJ4 {
   }
 
   @Test
+  public void testLeakChildWatcher() throws Exception {
+    String dqZNode = "/distqueue/test";
+    DistributedQueue dq = makeDistributedQueue(dqZNode);
+    assertTrue(dq.peekElements(1, 1, s1 -> true).isEmpty());
+    assertEquals(1, dq.watcherCount());
+    assertFalse(dq.isDirty());
+    assertTrue(dq.peekElements(1, 1, s1 -> true).isEmpty());
+    assertEquals(1, dq.watcherCount());
+    assertFalse(dq.isDirty());
+    assertNull(dq.peek());
+    assertEquals(1, dq.watcherCount());
+    assertFalse(dq.isDirty());
+    assertNull(dq.peek(10));
+    assertEquals(1, dq.watcherCount());
+    assertFalse(dq.isDirty());
+
+    dq.offer("hello world".getBytes(UTF8));
+    assertNotNull(dq.peek()); // synchronously available
+    // dirty and watcher state indeterminate here, race with watcher
+    Thread.sleep(100); // watcher should have fired now
+    assertNotNull(dq.peek());
+    assertEquals(1, dq.watcherCount());
+    assertFalse(dq.isDirty());
+    assertFalse(dq.peekElements(1, 1, s -> true).isEmpty());
+    assertEquals(1, dq.watcherCount());
+    assertFalse(dq.isDirty());
+  }
+
+  @Test
+  public void testLocallyOffer() throws Exception {
+    String dqZNode = "/distqueue/test";
+    DistributedQueue dq = makeDistributedQueue(dqZNode);
+    dq.peekElements(1, 1, s -> true);
+    for (int i = 0; i < 100; i++) {
+      byte[] data = String.valueOf(i).getBytes(UTF8);
+      dq.offer(data);
+      assertNotNull(dq.peek());
+      dq.poll();
+      dq.peekElements(1, 1, s -> true);
+    }
+  }
+
+
+  @Test
   public void testPeekElements() throws Exception {
     String dqZNode = "/distqueue/test";
     byte[] data = "hello world".getBytes(UTF8);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
new file mode 100644
index 0000000..4368fea
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.CoreAdminRequest;
+import org.apache.solr.client.solrj.response.CoreAdminResponse;
+import org.apache.solr.client.solrj.response.RequestStatusState;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MoveReplicaTest extends SolrCloudTestCase {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+    configureCluster(4)
+        .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf"))
+        .configure();
+  }
+
+  protected String getSolrXml() {
+    return "solr.xml";
+  }
+
+  @Test
+  public void test() throws Exception {
+    cluster.waitForAllNodes(5000);
+    String coll = "movereplicatest_coll";
+    log.info("total_jettys: " + cluster.getJettySolrRunners().size());
+
+    CloudSolrClient cloudClient = cluster.getSolrClient();
+
+    CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(coll, "conf1", 2, 2);
+    create.setMaxShardsPerNode(2);
+    cloudClient.request(create);
+
+    Replica replica = getRandomReplica(coll, cloudClient);
+    Set<String> liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes();
+    ArrayList<String> l = new ArrayList<>(liveNodes);
+    Collections.shuffle(l, random());
+    String targetNode = null;
+    for (String node : liveNodes) {
+      if (!replica.getNodeName().equals(node)) {
+        targetNode = node;
+        break;
+      }
+    }
+    assertNotNull(targetNode);
+    String shardId = null;
+    for (Slice slice : cloudClient.getZkStateReader().getClusterState().getCollection(coll).getSlices()) {
+      if (slice.getReplicas().contains(replica)) {
+        shardId = slice.getName();
+      }
+    }
+
+    CollectionAdminRequest.MoveReplica moveReplica = new CollectionAdminRequest.MoveReplica(coll, replica.getName(), targetNode);
+    moveReplica.processAsync("000", cloudClient);
+    CollectionAdminRequest.RequestStatus requestStatus = CollectionAdminRequest.requestStatus("000");
+    // wait for async request success
+    boolean success = false;
+    for (int i = 0; i < 200; i++) {
+      CollectionAdminRequest.RequestStatusResponse rsp = requestStatus.process(cloudClient);
+      if (rsp.getRequestStatus() == RequestStatusState.COMPLETED) {
+        success = true;
+        break;
+      }
+      assertFalse(rsp.getRequestStatus() == RequestStatusState.FAILED);
+      Thread.sleep(50);
+    }
+    assertTrue(success);
+    checkNumOfCores(cloudClient, replica.getNodeName(), 0);
+    checkNumOfCores(cloudClient, targetNode, 2);
+
+    moveReplica = new CollectionAdminRequest.MoveReplica(coll, shardId, targetNode, replica.getNodeName());
+    moveReplica.process(cloudClient);
+    checkNumOfCores(cloudClient, replica.getNodeName(), 1);
+    checkNumOfCores(cloudClient, targetNode, 1);
+  }
+
+  private Replica getRandomReplica(String coll, CloudSolrClient cloudClient) {
+    List<Replica> replicas = cloudClient.getZkStateReader().getClusterState().getCollection(coll).getReplicas();
+    Collections.shuffle(replicas, random());
+    return replicas.get(0);
+  }
+
+  private void checkNumOfCores(CloudSolrClient cloudClient, String nodeName, int expectedCores) throws IOException, SolrServerException {
+    assertEquals(nodeName + " does not have expected number of cores", expectedCores, getNumOfCores(cloudClient, nodeName));
+  }
+
+  private int getNumOfCores(CloudSolrClient cloudClient, String nodeName) throws IOException, SolrServerException {
+    try (HttpSolrClient coreclient = getHttpSolrClient(cloudClient.getZkStateReader().getBaseUrlForNodeName(nodeName))) {
+      CoreAdminResponse status = CoreAdminRequest.getStatus(null, coreclient);
+      return status.getCoreStatus().size();
+    }
+  }
+}
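
The processAsync/REQUESTSTATUS loop above is the general pattern for any
long-running Collections API call. A trimmed sketch under the same assumptions as
the test (a CloudSolrClient named client; coll, replicaName and targetNode as in
the test; "async-1" is an arbitrary request id):

    // Submit the move asynchronously, then poll its status until it
    // completes, failing fast if the overseer reports FAILED.
    CollectionAdminRequest.MoveReplica move =
        new CollectionAdminRequest.MoveReplica(coll, replicaName, targetNode);
    move.processAsync("async-1", client);

    CollectionAdminRequest.RequestStatus status = CollectionAdminRequest.requestStatus("async-1");
    RequestStatusState state = RequestStatusState.SUBMITTED;
    for (int i = 0; i < 200 && state != RequestStatusState.COMPLETED; i++) {
      Thread.sleep(50);
      state = status.process(client).getRequestStatus();
      assertNotSame("async move failed", RequestStatusState.FAILED, state);
    }
    assertSame(RequestStatusState.COMPLETED, state);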

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/cloud/NodeMutatorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/NodeMutatorTest.java b/solr/core/src/test/org/apache/solr/cloud/NodeMutatorTest.java
new file mode 100644
index 0000000..ffa6ba2
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/NodeMutatorTest.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.solr.SolrTestCaseJ4Test;
+import org.apache.solr.cloud.overseer.NodeMutator;
+import org.apache.solr.cloud.overseer.ZkWriteCommand;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.junit.Test;
+
+public class NodeMutatorTest extends SolrTestCaseJ4Test {
+
+  private static final String NODE3 = "baseUrl3_";
+  private static final String NODE3_URL = "http://baseUrl3";
+
+  private static final String NODE2 = "baseUrl2_";
+  private static final String NODE2_URL = "http://baseUrl2";
+
+  private static final String NODE1 = "baseUrl1_";
+  private static final String NODE1_URL = "http://baseUrl1";
+
+  @Test
+  public void downNodeReportsAllImpactedCollectionsAndNothingElse() throws IOException {
+    NodeMutator nm = new NodeMutator();
+    ZkNodeProps props = new ZkNodeProps(ZkStateReader.NODE_NAME_PROP, NODE1);
+
+    // We use 2 nodes with maxShardsPerNode = 1:
+    // Collection1: 2 shards x 1 replica -> replica1 on node1, replica2 on node2
+    // Collection2: 1 shard x 1 replica -> replica4 on node2
+    ClusterStateMockUtil.Result result = ClusterStateMockUtil.buildClusterState(null, "csrr2rD*csr2", 1, 1, NODE1, NODE2);
+    ClusterState clusterState = result.reader.getClusterState();
+    assertEquals(clusterState.getCollection("collection1").getReplica("replica1").getBaseUrl(), NODE1_URL);
+    assertEquals(clusterState.getCollection("collection1").getReplica("replica2").getBaseUrl(), NODE2_URL);
+    assertEquals(clusterState.getCollection("collection2").getReplica("replica4").getBaseUrl(), NODE2_URL);
+
+    props = new ZkNodeProps(ZkStateReader.NODE_NAME_PROP, NODE1);
+    List<ZkWriteCommand> writes = nm.downNode(clusterState, props);
+    assertEquals(writes.size(), 1);
+    assertEquals(writes.get(0).name, "collection1");
+    assertEquals(writes.get(0).collection.getReplica("replica1").getState(), Replica.State.DOWN);
+    assertEquals(writes.get(0).collection.getReplica("replica2").getState(), Replica.State.ACTIVE);
+    result.close();
+
+    // We use 3 nodes with maxShardsPerNode = 1:
+    // Collection1: 2 shards x 1 replica -> replica1 on node1, replica2 on node2
+    // Collection2: 1 shard x 1 replica -> replica4 on node2
+    // Collection3: 1 shard x 3 replicas -> replica5 on node1, replica6 on node2, replica7 on node3
+    result = ClusterStateMockUtil.buildClusterState(null, "csrr2rD*csr2csr1r2r3", 1, 1, NODE1, NODE2, NODE3);
+    clusterState = result.reader.getClusterState();
+    assertEquals(clusterState.getCollection("collection1").getReplica("replica1").getBaseUrl(), NODE1_URL);
+    assertEquals(clusterState.getCollection("collection1").getReplica("replica2").getBaseUrl(), NODE2_URL);
+
+    assertEquals(clusterState.getCollection("collection2").getReplica("replica4").getBaseUrl(), NODE2_URL);
+
+    assertEquals(clusterState.getCollection("collection3").getReplica("replica5").getBaseUrl(), NODE1_URL);
+    assertEquals(clusterState.getCollection("collection3").getReplica("replica6").getBaseUrl(), NODE2_URL);
+    assertEquals(clusterState.getCollection("collection3").getReplica("replica7").getBaseUrl(), NODE3_URL);
+
+    writes = nm.downNode(clusterState, props);
+    assertEquals(writes.size(), 2);
+    for (ZkWriteCommand write : writes) {
+      if (write.name.equals("collection1")) {
+        assertEquals(write.collection.getReplica("replica1").getState(), Replica.State.DOWN);
+        assertEquals(write.collection.getReplica("replica2").getState(), Replica.State.ACTIVE);
+      } else if (write.name.equals("collection3")) {
+        assertEquals(write.collection.getReplica("replica5").getState(), Replica.State.DOWN);
+        assertEquals(write.collection.getReplica("replica6").getState(), Replica.State.ACTIVE);
+        assertEquals(write.collection.getReplica("replica7").getState(), Replica.State.ACTIVE);
+      } else {
+        fail("No other collection needs to be changed");
+      }
+    }
+    result.close();
+  }
+}
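
The blueprint strings passed to ClusterStateMockUtil.buildClusterState(...) are a
compact cluster-state DSL; the grammar below is recovered from the javadoc this
commit moves out of SharedFSAutoReplicaFailoverUtilsTest (see the removed
buildClusterState further down), so treat this as a sketch, not the canonical
contract:

    // c = new collection, s = new slice, r = replica, trailing digit = node
    // number (default 1), optional letter = state (R recovering, D down,
    // F recovery failed; active otherwise), '*' = the flagged "bad" replica.
    ClusterStateMockUtil.Result r =
        ClusterStateMockUtil.buildClusterState(null, "csrr2rD*csr2", 1, 1, NODE1, NODE2);
    // collection1/slice1: replica1 (node1, active), replica2 (node2, active),
    //                     replica3 (node1, down, flagged by '*')
    // collection2/slice1: replica4 (node2, active)
    r.close();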

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java b/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java
index 9441e3f..9100eee 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java
@@ -18,7 +18,6 @@ package org.apache.solr.cloud;
 
 import java.io.File;
 import java.lang.invoke.MethodHandles;
-import java.net.ServerSocket;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
@@ -71,14 +70,6 @@ public class ReplicationFactorTest extends AbstractFullDistribZkTestBase {
     return createProxiedJetty(solrHome, dataDir, shardList, solrConfigOverride, schemaOverride);
   }
   
-  protected int getNextAvailablePort() throws Exception {    
-    int port = -1;
-    try (ServerSocket s = new ServerSocket(0)) {
-      port = s.getLocalPort();
-    }
-    return port;
-  }
-
   @Test
   public void test() throws Exception {
     log.info("replication factor test running");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverUtilsTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverUtilsTest.java b/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverUtilsTest.java
index f5fee21..3423420 100644
--- a/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverUtilsTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverUtilsTest.java
@@ -16,30 +16,16 @@
  */
 package org.apache.solr.cloud;
 
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.cloud.OverseerAutoReplicaFailoverThread.DownReplica;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.util.Utils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
+import static org.apache.solr.cloud.ClusterStateMockUtil.buildClusterState;
+
 public class SharedFSAutoReplicaFailoverUtilsTest extends SolrTestCaseJ4 {
   private static final String NODE6 = "baseUrl6_";
   private static final String NODE6_URL = "http://baseUrl6";
@@ -58,12 +44,8 @@ public class SharedFSAutoReplicaFailoverUtilsTest extends SolrTestCaseJ4 {
 
   private static final String NODE1 = "baseUrl1_";
   private static final String NODE1_URL = "http://baseUrl1";
-
-  private final static Pattern BLUEPRINT = Pattern.compile("([a-z])(\\d+)?(?:(['A','R','D','F']))?(\\*)?");
-
-  private int buildNumber = 1;
   
-  private List<Result> results;
+  private List<ClusterStateMockUtil.Result> results;
   
   @Before
   public void setUp() throws Exception {
@@ -74,61 +56,50 @@ public class SharedFSAutoReplicaFailoverUtilsTest extends SolrTestCaseJ4 {
   @After
   public void tearDown() throws Exception {
     super.tearDown();
-    for (Result result : results) {
+    for (ClusterStateMockUtil.Result result : results) {
       result.close();
     }
   }
   
   @Test
   public void testGetBestCreateUrlBasics() {
-    Result result = buildClusterState("csr1R*r2", NODE1);
+    ClusterStateMockUtil.Result result = buildClusterState(results, "csr1R*r2", NODE1);
     String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertNull("Should be no live node to failover to", createUrl);
     
-    result = buildClusterState("csr1R*r2", NODE1, NODE2);
+    result = buildClusterState(results, "csr1R*r2", NODE1, NODE2);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertNull("Only failover candidate node already has a replica", createUrl);
     
-    result = buildClusterState("csr1R*r2sr3", NODE1, NODE2, NODE3);
+    result = buildClusterState(results, "csr1R*r2sr3", NODE1, NODE2, NODE3);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertEquals("Node3 does not have a replica from the bad slice and should be the best choice", NODE3_URL, createUrl);
 
-    result = buildClusterState("csr1R*r2Fsr3r4r5", NODE1, NODE2, NODE3);
+    result = buildClusterState(results, "csr1R*r2Fsr3r4r5", NODE1, NODE2, NODE3);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertTrue(createUrl.equals(NODE3_URL));
 
-    result = buildClusterState("csr1*r2r3sr3r3sr4", NODE1, NODE2, NODE3, NODE4);
+    result = buildClusterState(results, "csr1*r2r3sr3r3sr4", NODE1, NODE2, NODE3, NODE4);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertEquals(NODE4_URL, createUrl);
     
-    result = buildClusterState("csr1*r2sr3r3sr4sr4", NODE1, NODE2, NODE3, NODE4);
+    result = buildClusterState(results, "csr1*r2sr3r3sr4sr4", NODE1, NODE2, NODE3, NODE4);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertTrue(createUrl.equals(NODE3_URL) || createUrl.equals(NODE4_URL));
   }
-  
-  
-  private static class Result implements Closeable {
-    DownReplica badReplica;
-    ZkStateReader reader;
-    
-    @Override
-    public void close() throws IOException {
-      reader.close();
-    }
-  }
 
   @Test
   public void testGetBestCreateUrlMultipleCollections() throws Exception {
 
-    Result result = buildClusterState("csr*r2csr2", NODE1);
+    ClusterStateMockUtil.Result result = buildClusterState(results, "csr*r2csr2", NODE1);
     String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertNull(createUrl);
 
-    result = buildClusterState("csr*r2csr2", NODE1);
+    result = buildClusterState(results, "csr*r2csr2", NODE1);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertNull(createUrl);
 
-    result = buildClusterState("csr*r2csr2", NODE1, NODE2);
+    result = buildClusterState(results, "csr*r2csr2", NODE1, NODE2);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertNull(createUrl);
   }
@@ -136,11 +107,11 @@ public class SharedFSAutoReplicaFailoverUtilsTest extends SolrTestCaseJ4 {
   @Test
   public void testGetBestCreateUrlMultipleCollections2() {
     
-    Result result = buildClusterState("csr*r2sr3cr2", NODE1);
+    ClusterStateMockUtil.Result result = buildClusterState(results, "csr*r2sr3cr2", NODE1);
     String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertNull(createUrl);
 
-    result = buildClusterState("csr*r2sr3cr2", NODE1, NODE2, NODE3);
+    result = buildClusterState(results, "csr*r2sr3cr2", NODE1, NODE2, NODE3);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertEquals(NODE3_URL, createUrl);
   }
@@ -148,253 +119,73 @@ public class SharedFSAutoReplicaFailoverUtilsTest extends SolrTestCaseJ4 {
   
   @Test
   public void testGetBestCreateUrlMultipleCollections3() {
-    Result result = buildClusterState("csr5r1sr4r2sr3r6csr2*r6sr5r3sr4r3", NODE1, NODE4, NODE5, NODE6);
+    ClusterStateMockUtil.Result result = buildClusterState(results, "csr5r1sr4r2sr3r6csr2*r6sr5r3sr4r3", NODE1, NODE4, NODE5, NODE6);
     String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertEquals(NODE1_URL, createUrl);
   }
   
   @Test
   public void testGetBestCreateUrlMultipleCollections4() {
-    Result result = buildClusterState("csr1r4sr3r5sr2r6csr5r6sr4r6sr5*r4", NODE6);
+    ClusterStateMockUtil.Result result = buildClusterState(results, "csr1r4sr3r5sr2r6csr5r6sr4r6sr5*r4", NODE6);
     String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertEquals(NODE6_URL, createUrl);
   }
   
   @Test
   public void testFailOverToEmptySolrInstance() {
-    Result result = buildClusterState("csr1*r1sr1csr1", NODE2);
+    ClusterStateMockUtil.Result result = buildClusterState(results, "csr1*r1sr1csr1", NODE2);
     String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertEquals(NODE2_URL, createUrl);
   }
   
   @Test
   public void testFavorForeignSlices() {
-    Result result = buildClusterState("csr*sr2csr3r3", NODE2, NODE3);
+    ClusterStateMockUtil.Result result = buildClusterState(results, "csr*sr2csr3r3", NODE2, NODE3);
     String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertEquals(NODE3_URL, createUrl);
     
-    result = buildClusterState("csr*sr2csr3r3r3r3r3r3r3", NODE2, NODE3);
+    result = buildClusterState(results, "csr*sr2csr3r3r3r3r3r3r3", NODE2, NODE3);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertEquals(NODE2_URL, createUrl);
   }
 
   @Test
   public void testCollectionMaxNodesPerShard() {
-    Result result = buildClusterState("csr*sr2", 1, 1, NODE2);
+    ClusterStateMockUtil.Result result = buildClusterState(results, "csr*sr2", 1, 1, NODE2);
     String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertNull(createUrl);
 
-    result = buildClusterState("csr*sr2", 1, 2, NODE2);
+    result = buildClusterState(results, "csr*sr2", 1, 2, NODE2);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertEquals(NODE2_URL, createUrl);
 
-    result = buildClusterState("csr*csr2r2", 1, 1, NODE2);
+    result = buildClusterState(results, "csr*csr2r2", 1, 1, NODE2);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
     assertEquals(NODE2_URL, createUrl);
   }
 
   @Test
   public void testMaxCoresPerNode() {
-    Result result = buildClusterState("csr*sr2", 1, 1, NODE2);
+    ClusterStateMockUtil.Result result = buildClusterState(results, "csr*sr2", 1, 1, NODE2);
     String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, 1);
     assertNull(createUrl);
 
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, 2);
     assertNull(createUrl);
 
-    result = buildClusterState("csr*sr2", 1, 2, NODE2);
+    result = buildClusterState(results, "csr*sr2", 1, 2, NODE2);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, 2);
     assertEquals(NODE2_URL, createUrl);
 
-    result = buildClusterState("csr*sr2sr3sr4", 1, 1, NODE2, NODE3, NODE4);
+    result = buildClusterState(results, "csr*sr2sr3sr4", 1, 1, NODE2, NODE3, NODE4);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, 1);
     assertNull(createUrl);
 
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, 2);
     assertNull(createUrl);
 
-    result = buildClusterState("csr*sr2sr3sr4", 1, 2, NODE2, NODE3, NODE4);
+    result = buildClusterState(results, "csr*sr2sr3sr4", 1, 2, NODE2, NODE3, NODE4);
     createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, 2);
     assertTrue(createUrl.equals(NODE3_URL) || createUrl.equals(NODE4_URL));
   }
-  
-  private Result buildClusterState(String string, String ... liveNodes) {
-    return buildClusterState(string, 1, liveNodes);
-  }
-  
-  private Result buildClusterState(String string, int replicationFactor, String ... liveNodes) {
-    return buildClusterState(string, replicationFactor, 10, liveNodes);
-  }
-  
-  /**
-   * This method lets you construct a complex ClusterState object by using simple strings of letters.
-   * 
-   * c = collection, s = slice, r = replica, \d = node number (r2 means the replica is on node 2), 
-   * state = [A,R,D,F], * = replica to replace, binds to the left.
-   * 
-   * For example:
-   * csrr2rD*sr2csr
-   * 
-   * Creates:
-   * 
-   * 'csrr2rD*'
-   * A collection, a shard, a replica on node 1 (the default) that is active (the default), a replica on node 2, and a replica on node 1
-   * that has a state of down and is the replica we will be looking to put somewhere else (the *).
-   * 
-   * 'sr2'
-   * Then, another shard that has a replica on node 2.
-   * 
-   * 'csr'
-   * Then, another collection that has a shard with a single active replica on node 1.
-   * 
-   * Result:
-   *        {
-   *         "collection2":{
-   *           "maxShardsPerNode":"1",
-   *           "replicationFactor":"1",
-   *           "shards":{"slice1":{
-   *               "state":"active",
-   *               "replicas":{"replica5":{
-   *                   "state":"active",
-   *                   "node_name":"baseUrl1_",
-   *                   "base_url":"http://baseUrl1"}}}}},
-   *         "collection1":{
-   *           "maxShardsPerNode":"1",
-   *           "replicationFactor":"1",
-   *           "shards":{
-   *             "slice1":{
-   *               "state":"active",
-   *               "replicas":{
-   *                 "replica3 (bad)":{
-   *                   "state":"down",
-   *                   "node_name":"baseUrl1_",
-   *                   "base_url":"http://baseUrl1"},
-   *                 "replica2":{
-   *                   "state":"active",
-   *                   "node_name":"baseUrl2_",
-   *                   "base_url":"http://baseUrl2"},
-   *                 "replica1":{
-   *                   "state":"active",
-   *                   "node_name":"baseUrl1_",
-   *                   "base_url":"http://baseUrl1"}}},
-   *             "slice2":{
-   *               "state":"active",
-   *               "replicas":{"replica4":{
-   *                   "state":"active",
-   *                   "node_name":"baseUrl2_",
-   *                   "base_url":"http://baseUrl2"}}}}}}
-   * 
-   */
-  @SuppressWarnings("resource")
-  private Result buildClusterState(String clusterDescription, int replicationFactor, int maxShardsPerNode, String ... liveNodes) {
-    Result result = new Result();
-    
-    Map<String,Slice> slices = null;
-    Map<String,Replica> replicas = null;
-    Map<String,Object> collectionProps = new HashMap<>();
-    collectionProps.put(ZkStateReader.MAX_SHARDS_PER_NODE, Integer.toString(maxShardsPerNode));
-    collectionProps.put(ZkStateReader.REPLICATION_FACTOR, Integer.toString(replicationFactor));
-    Map<String,DocCollection> collectionStates = new HashMap<>();
-    DocCollection docCollection = null;
-    Slice slice = null;
-    int replicaCount = 1;
-    
-    Matcher m = BLUEPRINT.matcher(clusterDescription);
-    while (m.find()) {
-      Replica replica;
-      switch (m.group(1)) {
-        case "c":
-          slices = new HashMap<>();
-          docCollection = new DocCollection("collection" + (collectionStates.size() + 1), slices, collectionProps, null);
-          collectionStates.put(docCollection.getName(), docCollection);
-          break;
-        case "s":
-          replicas = new HashMap<>();
-          slice = new Slice("slice" + (slices.size() + 1), replicas, null);
-          slices.put(slice.getName(), slice);
-          break;
-        case "r":
-          Map<String,Object> replicaPropMap = new HashMap<>();
-          String node;
-
-          node = m.group(2);
-          
-          if (node == null || node.trim().length() == 0) {
-            node = "1";
-          }
-          
-          Replica.State state = Replica.State.ACTIVE;
-          String stateCode = m.group(3);
-
-          if (stateCode != null) {
-            switch (stateCode.charAt(0)) {
-              case 'S':
-                state = Replica.State.ACTIVE;
-                break;
-              case 'R':
-                state = Replica.State.RECOVERING;
-                break;
-              case 'D':
-                state = Replica.State.DOWN;
-                break;
-              case 'F':
-                state = Replica.State.RECOVERY_FAILED;
-                break;
-              default:
-                throw new IllegalArgumentException(
-                    "Unexpected state for replica: " + stateCode);
-            }
-          }
-          
-          String nodeName = "baseUrl" + node + "_";
-          String replicaName = "replica" + replicaCount++;
-          
-          if ("*".equals(m.group(4))) {
-            replicaName += " (bad)";
-          }
-          
-          replicaPropMap.put(ZkStateReader.NODE_NAME_PROP, nodeName);
-          replicaPropMap.put(ZkStateReader.BASE_URL_PROP, "http://baseUrl" + node);
-          replicaPropMap.put(ZkStateReader.STATE_PROP, state.toString());
-          
-          replica = new Replica(replicaName, replicaPropMap);
-          
-          if ("*".equals(m.group(4))) {
-            result.badReplica = new DownReplica();
-            result.badReplica.replica = replica;
-            result.badReplica.slice = slice;
-            result.badReplica.collection = docCollection;
-          }
-          
-          replicas.put(replica.getName(), replica);
-          break;
-        default:
-          break;
-      }
-    }
-  
-    // trunk briefly had clusterstate taking a zkreader :( this was required to work around that - leaving
-    // until that issue is resolved.
-    MockZkStateReader reader = new MockZkStateReader(null, collectionStates.keySet());
-    ClusterState clusterState = new ClusterState(1, new HashSet<>(Arrays.asList(liveNodes)), collectionStates);
-    reader = new MockZkStateReader(clusterState, collectionStates.keySet());
-    
-    String json;
-    try {
-      json = new String(Utils.toJSON(clusterState), "UTF-8");
-    } catch (UnsupportedEncodingException e) {
-      throw new RuntimeException("Unexpected");
-    }
-    System.err.println("build:" + buildNumber++);
-    System.err.println(json);
-    
-    assert result.badReplica != null : "Is there no bad replica?";
-    assert result.badReplica.slice != null : "Is there no bad replica?";
-    
-    result.reader = reader;
-    
-    results.add(result);
-
-    return result;
-  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java b/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java
index 1c1c5c1..d7b9d8a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java
@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import com.codahale.metrics.Counter;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.BaseDistributedSearchTestCase;
 import org.apache.solr.SolrTestCaseJ4;
@@ -39,6 +40,7 @@ import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.request.SolrRequestHandler;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -109,10 +111,13 @@ public class TestRandomRequestDistribution extends AbstractFullDistribZkTestBase
     Map<String, Integer> shardVsCount = new HashMap<>();
     for (JettySolrRunner runner : jettys) {
       CoreContainer container = runner.getCoreContainer();
+      SolrMetricManager metricManager = container.getMetricManager();
       for (SolrCore core : container.getCores()) {
+        String registry = core.getCoreMetricManager().getRegistryName();
+        Counter cnt = metricManager.counter(null, registry, "requests", "QUERY.standard");
         SolrRequestHandler select = core.getRequestHandler("");
-        long c = (long) select.getStatistics().get("requests");
-        shardVsCount.put(core.getName(), (int) c);
+//        long c = (long) select.getStatistics().get("requests");
+        shardVsCount.put(core.getName(), (int) cnt.getCount());
       }
     }
 
@@ -190,6 +195,10 @@ public class TestRandomRequestDistribution extends AbstractFullDistribZkTestBase
       }
       assertNotNull(leaderCore);
 
+      SolrMetricManager leaderMetricManager = leaderCore.getCoreContainer().getMetricManager();
+      String leaderRegistry = leaderCore.getCoreMetricManager().getRegistryName();
+      Counter cnt = leaderMetricManager.counter(null, leaderRegistry, "requests", "QUERY.standard");
+
       // All queries should be served by the active replica
       // To make sure that's true we keep querying the down replica
       // If queries are getting processed by the down replica then the cluster state hasn't updated for that replica
@@ -200,8 +209,7 @@ public class TestRandomRequestDistribution extends AbstractFullDistribZkTestBase
         count++;
         client.query(new SolrQuery("*:*"));
 
-        SolrRequestHandler select = leaderCore.getRequestHandler("");
-        long c = (long) select.getStatistics().get("requests");
+        long c = cnt.getCount();
 
         if (c == 1) {
           break; // cluster state has been updated locally
@@ -222,8 +230,7 @@ public class TestRandomRequestDistribution extends AbstractFullDistribZkTestBase
         client.query(new SolrQuery("*:*"));
         count++;
 
-        SolrRequestHandler select = leaderCore.getRequestHandler("");
-        long c = (long) select.getStatistics().get("requests");
+        long c = cnt.getCount();
 
         assertEquals("Query wasn't served by leader", count, c);
       }
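
This file shows the general replacement for the removed
SolrRequestHandler.getStatistics() map: per-handler request counts now come from
the core's dropwizard metric registry. A minimal sketch of the new read path,
assuming a SolrCore named core and the same imports as the diff:

    // Resolves (creating if absent) the counter registered under
    // "QUERY.standard.requests" in this core's registry.
    SolrMetricManager metricManager = core.getCoreContainer().getMetricManager();
    String registry = core.getCoreMetricManager().getRegistryName();
    Counter requests = metricManager.counter(null, registry, "requests", "QUERY.standard");
    long served = requests.getCount();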

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
index 1b830ad..58d499b 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
@@ -16,15 +16,37 @@
  */
 package org.apache.solr.cloud.hdfs;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
 import com.carrotsearch.randomizedtesting.annotations.Nightly;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Metric;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.LuceneTestCase.Slow;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.CoreAdminRequest;
+import org.apache.solr.client.solrj.request.CoreStatus;
+import org.apache.solr.client.solrj.response.CoreAdminResponse;
 import org.apache.solr.cloud.CollectionsAPIDistributedZkTest;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkConfigManager;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 @Slow
 @Nightly
@@ -59,4 +81,96 @@ public class HdfsCollectionsAPIDistributedZkTest extends CollectionsAPIDistribut
     System.clearProperty("solr.hdfs.home");
   }
 
+  @Test
+  public void moveReplicaTest() throws Exception {
+    cluster.waitForAllNodes(5000);
+    String coll = "movereplicatest_coll";
+
+    CloudSolrClient cloudClient = cluster.getSolrClient();
+
+    CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(coll, "conf", 2, 2);
+    create.setMaxShardsPerNode(2);
+    cloudClient.request(create);
+
+    for (int i = 0; i < 10; i++) {
+      cloudClient.add(coll, sdoc("id",String.valueOf(i)));
+      cloudClient.commit(coll);
+    }
+
+    List<Slice> slices = new ArrayList<>(cloudClient.getZkStateReader().getClusterState().getCollection(coll).getSlices());
+    Collections.shuffle(slices, random());
+    Slice slice = null;
+    Replica replica = null;
+    for (Slice s : slices) {
+      slice = s;
+      for (Replica r : s.getReplicas()) {
+        if (s.getLeader() != r) {
+          replica = r;
+        }
+      }
+    }
+    String dataDir = getDataDir(replica);
+
+    Set<String> liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes();
+    ArrayList<String> l = new ArrayList<>(liveNodes);
+    Collections.shuffle(l, random());
+    String targetNode = null;
+    for (String node : liveNodes) {
+      if (!replica.getNodeName().equals(node)) {
+        targetNode = node;
+        break;
+      }
+    }
+    assertNotNull(targetNode);
+
+    CollectionAdminRequest.MoveReplica moveReplica = new CollectionAdminRequest.MoveReplica(coll, replica.getName(), targetNode);
+    moveReplica.process(cloudClient);
+
+    checkNumOfCores(cloudClient, replica.getNodeName(), 0);
+    checkNumOfCores(cloudClient, targetNode, 2);
+
+    waitForState("Timed out waiting for recovery to finish", coll, clusterShape(2, 2));
+    slice = cloudClient.getZkStateReader().getClusterState().getCollection(coll).getSlice(slice.getName());
+    boolean found = false;
+    for (Replica newReplica : slice.getReplicas()) {
+      if (getDataDir(newReplica).equals(dataDir)) {
+        found = true;
+      }
+    }
+    assertTrue(found);
+
+
+    // data dir is reused so replication will be skipped
+    for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
+      SolrMetricManager manager = jetty.getCoreContainer().getMetricManager();
+      List<String> registryNames = manager.registryNames().stream()
+          .filter(s -> s.startsWith("solr.core.")).collect(Collectors.toList());
+      for (String registry : registryNames) {
+        Map<String, Metric> metrics = manager.registry(registry).getMetrics();
+        Counter counter = (Counter) metrics.get("REPLICATION./replication.requests");
+        if (counter != null) {
+          assertEquals(0, counter.getCount());
+        }
+      }
+    }
+  }
+
+
+  private void checkNumOfCores(CloudSolrClient cloudClient, String nodeName, int expectedCores) throws IOException, SolrServerException {
+    assertEquals(nodeName + " does not have expected number of cores", expectedCores, getNumOfCores(cloudClient, nodeName));
+  }
+
+  private int getNumOfCores(CloudSolrClient cloudClient, String nodeName) throws IOException, SolrServerException {
+    try (HttpSolrClient coreclient = getHttpSolrClient(cloudClient.getZkStateReader().getBaseUrlForNodeName(nodeName))) {
+      CoreAdminResponse status = CoreAdminRequest.getStatus(null, coreclient);
+      return status.getCoreStatus().size();
+    }
+  }
+
+  private String getDataDir(Replica replica) throws IOException, SolrServerException {
+    try (HttpSolrClient coreclient = getHttpSolrClient(replica.getBaseUrl())) {
+      CoreStatus status = CoreAdminRequest.getCoreStatus(replica.getCoreName(), coreclient);
+      return status.getDataDirectory();
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java
index 5f0d537..aa42664 100644
--- a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java
+++ b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java
@@ -19,7 +19,7 @@ package org.apache.solr.core;
 import java.util.Map;
 
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.common.util.NamedList;
+import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.response.SolrQueryResponse;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -88,11 +88,11 @@ public class ExitableDirectoryReaderTest extends SolrTestCaseJ4 {
   public void testCacheAssumptions() throws Exception {
     String fq= "name:d*";
     SolrCore core = h.getCore();
-    SolrInfoMBean filterCacheStats = core.getInfoRegistry().get("filterCache");
-    long fqInserts = (long) filterCacheStats.getStatistics().get("inserts");
+    MetricsMap filterCacheStats = (MetricsMap)core.getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.filterCache");
+    long fqInserts = (long) filterCacheStats.getValue().get("inserts");
 
-    SolrInfoMBean queryCacheStats = core.getInfoRegistry().get("queryResultCache");
-    long qrInserts = (long) queryCacheStats.getStatistics().get("inserts");
+    MetricsMap queryCacheStats = (MetricsMap)core.getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.queryResultCache");
+    long qrInserts = (long) queryCacheStats.getValue().get("inserts");
 
     // This gets 0 docs back. Use 10000 instead of 1 for timeAllowed and it gets 100 back and the for loop below
     // succeeds.
@@ -105,16 +105,16 @@ public class ExitableDirectoryReaderTest extends SolrTestCaseJ4 {
     assertTrue("Should have partial results", (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)));
 
     assertEquals("Should NOT have inserted partial results in the cache!",
-        (long) queryCacheStats.getStatistics().get("inserts"), qrInserts);
+        (long) queryCacheStats.getValue().get("inserts"), qrInserts);
 
-    assertEquals("Should NOT have another insert", fqInserts, (long) filterCacheStats.getStatistics().get("inserts"));
+    assertEquals("Should NOT have another insert", fqInserts, (long) filterCacheStats.getValue().get("inserts"));
 
     // At the end of all this, we should have no hits in the queryResultCache.
     response = JQ(req("q", "*:*", "fq", fq, "indent", "true", "timeAllowed", longTimeout));
 
     // Check that we did insert this one.
-    assertEquals("Hits should still be 0", (long) filterCacheStats.getStatistics().get("hits"), 0L);
-    assertEquals("Inserts should be bumped", (long) filterCacheStats.getStatistics().get("inserts"), fqInserts + 1);
+    assertEquals("Hits should still be 0", (long) filterCacheStats.getValue().get("hits"), 0L);
+    assertEquals("Inserts should be bumped", (long) filterCacheStats.getValue().get("inserts"), fqInserts + 1);
 
     res = (Map) ObjectBuilder.fromJSON(response);
     body = (Map) (res.get("response"));
@@ -130,14 +130,14 @@ public class ExitableDirectoryReaderTest extends SolrTestCaseJ4 {
   public void testQueryResults() throws Exception {
     String q = "name:e*";
     SolrCore core = h.getCore();
-    SolrInfoMBean queryCacheStats = core.getInfoRegistry().get("queryResultCache");
-    NamedList nl = queryCacheStats.getStatistics();
+    MetricsMap queryCacheStats = (MetricsMap)core.getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.queryResultCache");
+    Map<String,Object> nl = queryCacheStats.getValue();
     long inserts = (long) nl.get("inserts");
 
     String response = JQ(req("q", q, "indent", "true", "timeAllowed", "1", "sleep", sleep));
 
     // The queryResultCache should NOT get an entry here.
-    nl = queryCacheStats.getStatistics();
+    nl = queryCacheStats.getValue();
     assertEquals("Should NOT have inserted partial results!", inserts, (long) nl.get("inserts"));
 
     Map res = (Map) ObjectBuilder.fromJSON(response);
@@ -150,7 +150,7 @@ public class ExitableDirectoryReaderTest extends SolrTestCaseJ4 {
     response = JQ(req("q", q, "indent", "true", "timeAllowed", longTimeout));
 
     // Check that we did insert this one.
-    NamedList nl2 = queryCacheStats.getStatistics();
+    Map<String,Object> nl2 = queryCacheStats.getValue();
     assertEquals("Hits should still be 0", (long) nl.get("hits"), (long) nl2.get("hits"));
     assertTrue("Inserts should be bumped", inserts < (long) nl2.get("inserts"));
 

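Cache statistics likewise moved from SolrInfoMBean.getStatistics() to MetricsMap
gauges in the core's metric registry, keyed as CACHE.searcher.<cacheName>. A
minimal sketch of the new lookup, assuming a SolrCore named core:

    MetricsMap filterCacheStats = (MetricsMap) core.getCoreMetricManager()
        .getRegistry().getMetrics().get("CACHE.searcher.filterCache");
    Map<String, Object> stats = filterCacheStats.getValue();
    long hits = (long) stats.get("hits");      // same keys as the old NamedList
    long inserts = (long) stats.get("inserts");
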

[19/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/misc/src/test/org/apache/lucene/search/TestInetAddressRangeQueries.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/search/TestInetAddressRangeQueries.java b/lucene/misc/src/test/org/apache/lucene/search/TestInetAddressRangeQueries.java
index e22cf9b..252162c 100644
--- a/lucene/misc/src/test/org/apache/lucene/search/TestInetAddressRangeQueries.java
+++ b/lucene/misc/src/test/org/apache/lucene/search/TestInetAddressRangeQueries.java
@@ -19,6 +19,7 @@ package org.apache.lucene.search;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 
+import org.apache.lucene.document.InetAddressPoint;
 import org.apache.lucene.document.InetAddressRange;
 import org.apache.lucene.util.StringHelper;
 
@@ -28,16 +29,12 @@ import org.apache.lucene.util.StringHelper;
 public class TestInetAddressRangeQueries extends BaseRangeFieldQueryTestCase {
   private static final String FIELD_NAME = "ipRangeField";
 
-  private IPVersion ipVersion;
-
-  private enum IPVersion {IPv4, IPv6}
-
   @Override
   protected Range nextRange(int dimensions) throws Exception {
     InetAddress min = nextInetaddress();
-    byte[] bMin = min.getAddress();
+    byte[] bMin = InetAddressPoint.encode(min);
     InetAddress max = nextInetaddress();
-    byte[] bMax = max.getAddress();
+    byte[] bMax = InetAddressPoint.encode(max);
     if (StringHelper.compare(bMin.length, bMin, 0, bMax, 0) > 0) {
       return new IpRange(max, min);
     }
@@ -46,89 +43,74 @@ public class TestInetAddressRangeQueries extends BaseRangeFieldQueryTestCase {
 
   /** return random IPv4 or IPv6 address */
   private InetAddress nextInetaddress() throws UnknownHostException {
-    byte[] b;
-    switch (ipVersion) {
-      case IPv4:
-        b = new byte[4];
-        break;
-      case IPv6:
-        b = new byte[16];
-        break;
-      default:
-        throw new IllegalArgumentException("incorrect IP version: " + ipVersion);
-    }
+    byte[] b = random().nextBoolean() ? new byte[4] : new byte[16];
     random().nextBytes(b);
     return InetAddress.getByAddress(b);
   }
 
-  /** randomly select version across tests */
-  private IPVersion ipVersion() {
-    return random().nextBoolean() ? IPVersion.IPv4 : IPVersion.IPv6;
-  }
-
   @Override
   public void testRandomTiny() throws Exception {
-    ipVersion = ipVersion();
     super.testRandomTiny();
   }
 
   @Override
   public void testMultiValued() throws Exception {
-    ipVersion = ipVersion();
     super.testRandomMedium();
   }
 
   @Override
   public void testRandomMedium() throws Exception {
-    ipVersion = ipVersion();
     super.testMultiValued();
   }
 
   @Nightly
   @Override
   public void testRandomBig() throws Exception {
-    ipVersion = ipVersion();
     super.testRandomBig();
   }
 
   /** return random range */
   @Override
   protected InetAddressRange newRangeField(Range r) {
-    return new InetAddressRange(FIELD_NAME, ((IpRange)r).min, ((IpRange)r).max);
+    return new InetAddressRange(FIELD_NAME, ((IpRange)r).minAddress, ((IpRange)r).maxAddress);
   }
 
   /** return random intersects query */
   @Override
   protected Query newIntersectsQuery(Range r) {
-    return InetAddressRange.newIntersectsQuery(FIELD_NAME, ((IpRange)r).min, ((IpRange)r).max);
+    return InetAddressRange.newIntersectsQuery(FIELD_NAME, ((IpRange)r).minAddress, ((IpRange)r).maxAddress);
   }
 
   /** return random contains query */
   @Override
   protected Query newContainsQuery(Range r) {
-    return InetAddressRange.newContainsQuery(FIELD_NAME, ((IpRange)r).min, ((IpRange)r).max);
+    return InetAddressRange.newContainsQuery(FIELD_NAME, ((IpRange)r).minAddress, ((IpRange)r).maxAddress);
   }
 
   /** return random within query */
   @Override
   protected Query newWithinQuery(Range r) {
-    return InetAddressRange.newWithinQuery(FIELD_NAME, ((IpRange)r).min, ((IpRange)r).max);
+    return InetAddressRange.newWithinQuery(FIELD_NAME, ((IpRange)r).minAddress, ((IpRange)r).maxAddress);
   }
 
   /** return random crosses query */
   @Override
   protected Query newCrossesQuery(Range r) {
-    return InetAddressRange.newCrossesQuery(FIELD_NAME, ((IpRange)r).min, ((IpRange)r).max);
+    return InetAddressRange.newCrossesQuery(FIELD_NAME, ((IpRange)r).minAddress, ((IpRange)r).maxAddress);
   }
 
   /** encapsulated IpRange for test validation */
   private class IpRange extends Range {
-    InetAddress min;
-    InetAddress max;
+    InetAddress minAddress;
+    InetAddress maxAddress;
+    byte[] min;
+    byte[] max;
 
     IpRange(InetAddress min, InetAddress max) {
-      this.min = min;
-      this.max = max;
+      this.minAddress = min;
+      this.maxAddress = max;
+      this.min = InetAddressPoint.encode(min);
+      this.max = InetAddressPoint.encode(max);
     }
 
     @Override
@@ -138,33 +120,39 @@ public class TestInetAddressRangeQueries extends BaseRangeFieldQueryTestCase {
 
     @Override
     protected InetAddress getMin(int dim) {
-      return min;
+      return minAddress;
     }
 
     @Override
     protected void setMin(int dim, Object val) {
-      byte[] v = ((InetAddress)val).getAddress();
+      InetAddress v = (InetAddress)val;
+      byte[] e = InetAddressPoint.encode(v);
 
-      if (StringHelper.compare(v.length, min.getAddress(), 0, v, 0) < 0) {
-        max = (InetAddress)val;
+      if (StringHelper.compare(e.length, min, 0, e, 0) < 0) {
+        max = e;
+        maxAddress = v;
       } else {
-        min = (InetAddress) val;
+        min = e;
+        minAddress = v;
       }
     }
 
     @Override
     protected InetAddress getMax(int dim) {
-      return max;
+      return maxAddress;
     }
 
     @Override
     protected void setMax(int dim, Object val) {
-      byte[] v = ((InetAddress)val).getAddress();
+      InetAddress v = (InetAddress)val;
+      byte[] e = InetAddressPoint.encode(v);
 
-      if (StringHelper.compare(v.length, max.getAddress(), 0, v, 0) > 0) {
-        min = (InetAddress)val;
+      if (StringHelper.compare(e.length, max, 0, e, 0) > 0) {
+        min = e;
+        minAddress = v;
       } else {
-        max = (InetAddress) val;
+        max = e;
+        maxAddress = v;
       }
     }
 
@@ -177,37 +165,31 @@ public class TestInetAddressRangeQueries extends BaseRangeFieldQueryTestCase {
     @Override
     protected boolean isDisjoint(Range o) {
       IpRange other = (IpRange)o;
-      byte[] bMin = min.getAddress();
-      byte[] bMax = max.getAddress();
-      return StringHelper.compare(bMin.length, bMin, 0, other.max.getAddress(), 0) > 0 ||
-          StringHelper.compare(bMax.length, bMax, 0, other.min.getAddress(), 0) < 0;
+      return StringHelper.compare(min.length, min, 0, other.max, 0) > 0 ||
+          StringHelper.compare(max.length, max, 0, other.min, 0) < 0;
     }
 
     @Override
     protected boolean isWithin(Range o) {
       IpRange other = (IpRange)o;
-      byte[] bMin = min.getAddress();
-      byte[] bMax = max.getAddress();
-      return StringHelper.compare(bMin.length, bMin, 0, other.min.getAddress(), 0) >= 0 &&
-          StringHelper.compare(bMax.length, bMax, 0, other.max.getAddress(), 0) <= 0;
+      return StringHelper.compare(min.length, min, 0, other.min, 0) >= 0 &&
+          StringHelper.compare(max.length, max, 0, other.max, 0) <= 0;
     }
 
     @Override
     protected boolean contains(Range o) {
       IpRange other = (IpRange)o;
-      byte[] bMin = min.getAddress();
-      byte[] bMax = max.getAddress();
-      return StringHelper.compare(bMin.length, bMin, 0, other.min.getAddress(), 0) <= 0 &&
-          StringHelper.compare(bMax.length, bMax, 0, other.max.getAddress(), 0) >= 0;
+      return StringHelper.compare(min.length, min, 0, other.min, 0) <= 0 &&
+          StringHelper.compare(max.length, max, 0, other.max, 0) >= 0;
     }
 
     @Override
     public String toString() {
       StringBuilder b = new StringBuilder();
       b.append("Box(");
-      b.append(min.getHostAddress());
+      b.append(minAddress.getHostAddress());
       b.append(" TO ");
-      b.append(max.getHostAddress());
+      b.append(maxAddress.getHostAddress());
       b.append(")");
       return b.toString();
     }
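
The reason for switching from InetAddress.getAddress() to
InetAddressPoint.encode(...): getAddress() yields 4 bytes for IPv4 and 16 for
IPv6, so byte-wise comparison across the two families is meaningless, while
encode() normalizes every address to the same 16-byte IPv4-mapped form. A small
illustration (the literal address is made up):

    InetAddress v4 = InetAddress.getByName("1.2.3.4");
    byte[] raw = v4.getAddress();                 // 4 bytes: {1, 2, 3, 4}
    byte[] encoded = InetAddressPoint.encode(v4); // 16 bytes, IPv4-mapped
    // Encoded forms of IPv4 and IPv6 addresses can now be ordered with
    // StringHelper.compare(16, a, 0, b, 0), as the updated test does.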

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/HeatmapFacetCounter.java
----------------------------------------------------------------------
diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/HeatmapFacetCounter.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/HeatmapFacetCounter.java
index c3c98f7..6c5253e 100644
--- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/HeatmapFacetCounter.java
+++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/HeatmapFacetCounter.java
@@ -142,6 +142,9 @@ public class HeatmapFacetCounter {
     }
 
     final Heatmap heatmap = new Heatmap(columns, rows, ctx.makeRectangle(heatMinX, heatMaxX, heatMinY, heatMaxY));
+    if (topAcceptDocs instanceof Bits.MatchNoBits) {
+      return heatmap; // short-circuit
+    }
 
     //All ancestor cell counts (of facetLevel) will be captured during facet visiting and applied later. If the data is
     // just points then there won't be any ancestors.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java b/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
index b03fa3d..a587653 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
@@ -630,7 +630,7 @@ public class AssertingLeafReader extends FilterLeafReader {
     }
 
     @Override
-    public int ordValue() {
+    public int ordValue() throws IOException {
       assertThread("Sorted doc values", creationThread);
       assert exists;
       int ord = in.ordValue();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/test-framework/src/java/org/apache/lucene/index/OwnCacheKeyMultiReader.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/OwnCacheKeyMultiReader.java b/lucene/test-framework/src/java/org/apache/lucene/index/OwnCacheKeyMultiReader.java
index 45aabfe..a412ed8 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/OwnCacheKeyMultiReader.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/OwnCacheKeyMultiReader.java
@@ -40,7 +40,8 @@ public final class OwnCacheKeyMultiReader extends MultiReader {
 
     @Override
     public void addClosedListener(ClosedListener listener) {
-        readerClosedListeners.add(listener);
+      ensureOpen();
+      readerClosedListeners.add(listener);
     }
 
   };

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/tools/javadoc/ecj.javadocs.prefs
----------------------------------------------------------------------
diff --git a/lucene/tools/javadoc/ecj.javadocs.prefs b/lucene/tools/javadoc/ecj.javadocs.prefs
index 61971ec..5d96e88 100644
--- a/lucene/tools/javadoc/ecj.javadocs.prefs
+++ b/lucene/tools/javadoc/ecj.javadocs.prefs
@@ -8,7 +8,7 @@ org.eclipse.jdt.core.compiler.annotation.nullanalysis=disabled
 org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
 org.eclipse.jdt.core.compiler.compliance=1.8
 org.eclipse.jdt.core.compiler.doc.comment.support=enabled
-org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=ignore
+org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=error
 org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
 org.eclipse.jdt.core.compiler.problem.autoboxing=ignore
 org.eclipse.jdt.core.compiler.problem.comparingIdentical=error
@@ -16,18 +16,18 @@ org.eclipse.jdt.core.compiler.problem.deadCode=ignore
 org.eclipse.jdt.core.compiler.problem.deprecation=ignore
 org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled
 org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled
-org.eclipse.jdt.core.compiler.problem.discouragedReference=ignore
+org.eclipse.jdt.core.compiler.problem.discouragedReference=error
 org.eclipse.jdt.core.compiler.problem.emptyStatement=ignore
 org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
 org.eclipse.jdt.core.compiler.problem.fallthroughCase=ignore
 org.eclipse.jdt.core.compiler.problem.fatalOptionalError=disabled
 org.eclipse.jdt.core.compiler.problem.fieldHiding=ignore
-org.eclipse.jdt.core.compiler.problem.finalParameterBound=ignore
-org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=ignore
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=ignore
-org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=ignore
+org.eclipse.jdt.core.compiler.problem.finalParameterBound=error
+org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=error
+org.eclipse.jdt.core.compiler.problem.forbiddenReference=error
+org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=error
 org.eclipse.jdt.core.compiler.problem.includeNullInfoFromAsserts=disabled
-org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=ignore
+org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=error
 org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=ignore
 org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=ignore
 org.eclipse.jdt.core.compiler.problem.invalidJavadoc=error
@@ -36,7 +36,7 @@ org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsDeprecatedRef=disabled
 org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsNotVisibleRef=disabled
 org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsVisibility=private
 org.eclipse.jdt.core.compiler.problem.localVariableHiding=ignore
-org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=ignore
+org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=error
 org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore
 org.eclipse.jdt.core.compiler.problem.missingHashCodeMethod=ignore
 org.eclipse.jdt.core.compiler.problem.missingJavadocComments=ignore
@@ -52,10 +52,10 @@ org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotationForInterfaceMetho
 org.eclipse.jdt.core.compiler.problem.missingSerialVersion=ignore
 org.eclipse.jdt.core.compiler.problem.missingSynchronizedOnInheritedMethod=ignore
 org.eclipse.jdt.core.compiler.problem.noEffectAssignment=error
-org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=ignore
+org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=error
 org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore
 org.eclipse.jdt.core.compiler.problem.nullReference=ignore
-org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=ignore
+org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=error
 org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore
 org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore
 org.eclipse.jdt.core.compiler.problem.potentialNullReference=ignore

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/tools/junit4/cached-timehints.txt
----------------------------------------------------------------------
diff --git a/lucene/tools/junit4/cached-timehints.txt b/lucene/tools/junit4/cached-timehints.txt
index f2b8974..cb3da99 100644
--- a/lucene/tools/junit4/cached-timehints.txt
+++ b/lucene/tools/junit4/cached-timehints.txt
@@ -813,7 +813,7 @@ org.apache.solr.EchoParamsTest=136,170,349,124,140,142,284
 org.apache.solr.MinimalSchemaTest=304,316,467,304,297,755,309
 org.apache.solr.OutputWriterTest=302,276,265,314,244,211,268
 org.apache.solr.SampleTest=339,290,266,243,333,414,355
-org.apache.solr.SolrInfoMBeanTest=1090,1132,644,629,637,1023,735
+org.apache.solr.SolrInfoBeanTest=1090,1132,644,629,637,1023,735
 org.apache.solr.TestDistributedGrouping=13095,9478,8420,9633,10692,9265,10893
 org.apache.solr.TestDistributedSearch=11199,9886,16211,11367,11325,10717,10392
 org.apache.solr.TestDocumentBuilder=10,10,9,13,10,9,10

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 4c2dd0b..73c8ef9 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -52,6 +52,19 @@ Upgrading from Solr 6.x
 
 * Deprecated method getNumericType() has been removed from FieldType. Use getNumberType() instead
 
+* MBean names and attributes now follow the hierarchical names used in metrics. This is also reflected
+  in /admin/mbeans and /admin/plugins output, and can be observed in the UI Plugins tab, because all of
+  these APIs now get their data from the metrics API. The old (mostly flat) JMX view has been removed.
+
+* <jmx> element in solrconfig.xml is no longer supported. Equivalent functionality can be configured in
+  solr.xml using <metrics><reporter ...> element and SolrJmxReporter implementation. Limited back-compatibility
+  is offered by automatically adding a default instance of SolrJmxReporter if it's missing, AND when a local
+  MBean server is found (which can be activated either via ENABLE_REMOTE_JMX_OPTS in solr.in.sh or via system
+  properties, e.g. -Dcom.sun.management.jmxremote). This default instance exports all Solr metrics from all
+  registries as hierarchical MBeans. This behavior can also be disabled by specifying a SolrJmxReporter
+  configuration with a boolean init arg "enabled" set to "false". For more fine-grained control, users
+  should explicitly specify at least one SolrJmxReporter configuration.
+
 New Features
 ----------------------
 * SOLR-9857, SOLR-9858: Collect aggregated metrics from nodes and shard leaders in overseer. (ab)
@@ -62,11 +75,11 @@ New Features
   tuples, one for each value in the multi-valued field. (Dennis Gove)
 
 * SOLR-10339: New set-trigger and remove-trigger APIs for autoscaling. (shalin)
-
+ 
 * SOLR-10340: New set-listener and remove-listener API for autoscaling. (shalin)
-
+ 
 * SOLR-10358: New suspend-trigger and resume-trigger APIs for autoscaling. (shalin)
-
+ 
 * SOLR-10356: Adds basic math Streaming Evaluators (Dennis Gove)
 
 * SOLR-10393: Adds UUID Streaming Evaluator (Dennis Gove)
@@ -103,6 +116,17 @@ Optimizations
 Other Changes
 * SOLR-10236: Removed FieldType.getNumericType(). Use getNumberType() instead. (Tomás Fernández Löbbe)
 
+* SOLR-10347: Removed index level boost support from "documents" section of the admin UI (Amrit Sarkar via
+  Tomás Fernández Löbbe)
+
+* SOLR-9959: SolrInfoMBean category and hierarchy cleanup. Per-component statistics are now obtained from
+  the metrics API, legacy JMX support has been replaced with SolrJmxReporter functionality. Several reporter
+  improvements (support for multiple prefix filters, "enabled" flag, reuse of service clients). (ab)
+
+* SOLR-10418: Expose safe system properties via metrics API as 'system.properties' in 'solr.jvm' group.
+  Add support for selecting specific properties from any compound metric using 'property' parameter to
+  /admin/metrics handler. (ab)
+
 ----------------------
 
 ==================  6.6.0 ==================
@@ -144,6 +168,23 @@ New Features
 
 * SOLR-9993: Add support for ExpandComponent with PointFields. (Cao Manh Dat)
 
+* SOLR-10239: MOVEREPLICA API (Cao Manh Dat, Noble Paul, shalin)
+
+* SOLR-9936: Allow configuration for recoveryExecutor thread pool size. (Tim Owen via Mark Miller)
+
+* SOLR-10447: Collections API now supports a LISTALIASES command to return a list of all collection aliases.
+  (Yago Riveiro, Ishan Chattopadhyaya, Mark Miller, Steve Molloy, Shawn Heisey, Mike Drob, janhoy)
+
+* SOLR-10446: CloudSolrClient can now be initialized using the base URL of a Solr instance instead of
+  ZooKeeper hosts. This is possible through the use of the newly introduced HttpClusterStateProvider.
+  Fetching the list of collection aliases depends on the LISTALIASES command, so this way of
+  initializing CloudSolrClient will not work with collection aliases against older Solr servers
+  that don't support LISTALIASES. (Ishan Chattopadhyaya, Noble Paul)
+
+* SOLR-10082: Variance and Standard Deviation aggregators for the JSON Facet API.
+  Example: json.facet={x:"stddev(field1)", y:"variance(field2)"}
+  (Rustam Hashimov, yonik)
+
 Optimizations
 ----------------------
 
@@ -165,6 +206,28 @@ Bug Fixes
 * SOLR-10387: zkTransfer normalizes destination path incorrectly if source is a Windows directory
   (gopikannan venugopalsamy, Erick Erickson)
 
+* SOLR-10323: fix to SpellingQueryConverter to properly strip out colons in field-specific queries.
+  (Amrit Sarkar via James Dyer)
+
+* SOLR-10264: Fixes multi-term synonym parsing in ManagedSynonymFilterFactory.
+  (Jörg Rathlev, Steve Rowe, Christine Poerschke)
+  
+* SOLR-8807: fix Spellcheck "collateMaxCollectDocs" parameter to work with queries that have the
+  CollapsingQParserPlugin applied. (James Dyer)
+
+* SOLR-10474: TestPointFields.testPointFieldReturn() depends on order of unsorted hits. (Steve Rowe)
+
+* SOLR-10473: Correct LBHttpSolrClient's confusing SolrServerException message when timeAllowed is exceeded.
+  (Christine Poerschke)
+
+* SOLR-10047: Mismatched Docvalues segments cause exception in Sorting/Faceting. Solr now uninverts per segment
+  to avoid such exceptions. (Keith Laban via shalin)
+
+* SOLR-10472: Fixed uninversion (aka: FieldCache) bugs with the numeric PointField classes, and CurrencyField (hossman)
+
+* SOLR-5127: Multiple highlight fields and wildcards are now supported, e.g. hl.fl=title,text_*
+  (Sven-S. Porst, Daniel Debray, Simon Endele, Christine Poerschke)
+
 Other Changes
 ----------------------
 
@@ -200,6 +263,18 @@ Other Changes
 
 * SOLR-9745: print errors from solr.cmd (Gopikannan Venugopalsamy via Mikhail Khludnev)
 
+* SOLR-10394: Rename getSortWithinGroup to getWithinGroupSort in search.grouping.Command class.
+  (Judith Silverman, Christine Poerschke)
+
+* SOLR-10440: LBHttpSolrClient.doRequest is now always wrapped in a Mapped Diagnostic Context (MDC).
+  (Christine Poerschke)
+
+* SOLR-10429: UpdateRequest#getRoutes() should copy the response parser (noble)
+
+* SOLR-10007: Clean up references to CoreContainer and CoreDescriptors (Erick Erickson)
+
+* SOLR-10151: Use monotonically incrementing counter for doc ids in TestRecovery. (Peter Szantai-Kis, Mano Kovacs via Mark Miller)
+
 ==================  6.5.1 ==================
 
 Bug Fixes
@@ -211,6 +286,29 @@ Bug Fixes
 * SOLR-10416: The JSON output of /admin/metrics is fixed to write the container as a
   map (SimpleOrderedMap) instead of an array (NamedList). (shalin)
 
+* SOLR-10277: On 'downnode', lots of wasteful mutations are done to ZK.
+  (Joshua Humphries, Scott Blum, Varun Thacker, shalin)
+
+* SOLR-10421: Fix params persistence for solr/contrib/ltr (MinMax|Standard)Normalizer classes.
+  (Jianxiong Dong, Christine Poerschke)
+
+* SOLR-10404: The fetch() streaming expression wouldn't work if a value included query syntax chars (like :+-).
+  Fixed, and enhanced the generated query to not pollute the queryCache. (David Smiley)
+  
+* SOLR-10423: Disable graph query production via schema configuration <fieldtype ... enableGraphQueries="false">.
+  This fixes broken queries for ShingleFilter-containing query-time analyzers when request param sow=false.
+  (Steve Rowe)
+
+* SOLR-10425: Fix indexed="false" on numeric PointFields (Tomás Fernández Löbbe, hossman)
+
+* SOLR-10341: SQL AVG function mis-interprets field type. (Joel Bernstein)
+
+* SOLR-10444: SQL interface does not use client cache. (Joel Bernstein)
+
+* SOLR-10420: Solr 6.x leaking one SolrZkClient instance per second (Scott Blum, Cao Manh Dat, Markus Jelsma, Steve Rowe)
+
+* SOLR-10439: The new 'large' attribute had been forgotten in /schema/fields?showDefaults=true
+
 ==================  6.5.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

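As a usage note on the SOLR-10446 entry above, here is a minimal, hedged SolrJ sketch of
initializing CloudSolrClient from a base URL instead of ZooKeeper hosts. The builder method
name withSolrUrl is an assumption based on the changelog text; internally the client is
expected to rely on the new HttpClusterStateProvider (and the LISTALIASES command for
alias resolution).

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.CloudSolrClient;

    public class BaseUrlClientExample {
      public static void main(String[] args) throws Exception {
        try (CloudSolrClient client = new CloudSolrClient.Builder()
            .withSolrUrl("http://localhost:8983/solr")  // assumed builder method, see SOLR-10446
            .build()) {
          client.setDefaultCollection("gettingstarted");
          System.out.println(client.query(new SolrQuery("*:*")).getResults().getNumFound());
        }
      }
    }
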
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/contrib/analytics/src/java/org/apache/solr/analytics/plugin/AnalyticsStatisticsCollector.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/plugin/AnalyticsStatisticsCollector.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/plugin/AnalyticsStatisticsCollector.java
index b22dcb5..1670ad5 100644
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/plugin/AnalyticsStatisticsCollector.java
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/plugin/AnalyticsStatisticsCollector.java
@@ -16,11 +16,11 @@
  */
 package org.apache.solr.analytics.plugin;
 
+import java.util.HashMap;
+import java.util.Map;
 import java.util.concurrent.atomic.AtomicLong;
 
 import com.codahale.metrics.Timer;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.util.stats.MetricUtils;
 
 public class AnalyticsStatisticsCollector {
@@ -85,17 +85,20 @@ public class AnalyticsStatisticsCollector {
     currentTimer.stop();
   }
 
-  public NamedList<Object> getStatistics() {
-    NamedList<Object> lst = new SimpleOrderedMap<>();
-    lst.add("requests", numRequests.longValue());
-    lst.add("analyticsRequests", numAnalyticsRequests.longValue());
-    lst.add("statsRequests", numStatsRequests.longValue());
-    lst.add("statsCollected", numCollectedStats.longValue());
-    lst.add("fieldFacets", numFieldFacets.longValue());
-    lst.add("rangeFacets", numRangeFacets.longValue());
-    lst.add("queryFacets", numQueryFacets.longValue());
-    lst.add("queriesInQueryFacets", numQueries.longValue());
-    MetricUtils.addMetrics(lst, requestTimes);
-    return lst;
+  public Map<String, Object> getStatistics() {
+
+    Map<String, Object> map = new HashMap<>();
+    MetricUtils.convertTimer("", requestTimes, MetricUtils.PropertyFilter.ALL, false, false, (k, v) -> {
+      map.putAll((Map<String, Object>)v);
+    });
+    map.put("requests", numRequests.longValue());
+    map.put("analyticsRequests", numAnalyticsRequests.longValue());
+    map.put("statsRequests", numStatsRequests.longValue());
+    map.put("statsCollected", numCollectedStats.longValue());
+    map.put("fieldFacets", numFieldFacets.longValue());
+    map.put("rangeFacets", numRangeFacets.longValue());
+    map.put("queryFacets", numQueryFacets.longValue());
+    map.put("queriesInQueryFacets", numQueries.longValue());
+    return map;
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/contrib/analytics/src/java/org/apache/solr/handler/component/AnalyticsComponent.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/handler/component/AnalyticsComponent.java b/solr/contrib/analytics/src/java/org/apache/solr/handler/component/AnalyticsComponent.java
index f33b6c7..505533b 100644
--- a/solr/contrib/analytics/src/java/org/apache/solr/handler/component/AnalyticsComponent.java
+++ b/solr/contrib/analytics/src/java/org/apache/solr/handler/component/AnalyticsComponent.java
@@ -22,9 +22,11 @@ import org.apache.solr.analytics.plugin.AnalyticsStatisticsCollector;
 import org.apache.solr.analytics.request.AnalyticsStats;
 import org.apache.solr.analytics.util.AnalyticsParams;
 import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.common.util.NamedList;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
 
-public class AnalyticsComponent extends SearchComponent {
+public class AnalyticsComponent extends SearchComponent implements SolrMetricProducer {
   public static final String COMPONENT_NAME = "analytics";
   private final AnalyticsStatisticsCollector analyticsCollector = new AnalyticsStatisticsCollector();;
 
@@ -80,12 +82,8 @@ public class AnalyticsComponent extends SearchComponent {
   }
 
   @Override
-  public String getVersion() {
-    return getClass().getPackage().getSpecificationVersion();
-  }
-
-  @Override
-  public NamedList getStatistics() {
-    return analyticsCollector.getStatistics();
+  public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
+    MetricsMap metrics = new MetricsMap((detailed, map) -> map.putAll(analyticsCollector.getStatistics()));
+    manager.registerGauge(this, registry, metrics, true, getClass().getSimpleName(), getCategory().toString(), scope);
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
index 0766c7f..faea3ba 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
@@ -26,12 +26,13 @@ import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.ContentStreamBase;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.common.util.ContentStream;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.handler.RequestHandlerBase;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.response.RawResponseWriter;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
@@ -74,6 +75,8 @@ public class DataImportHandler extends RequestHandlerBase implements
 
   private String myName = "dataimport";
 
+  private MetricsMap metrics;
+
   private static final String PARAM_WRITER_IMPL = "writerImpl";
   private static final String DEFAULT_WRITER_NAME = "SolrWriter";
 
@@ -260,41 +263,33 @@ public class DataImportHandler extends RequestHandlerBase implements
       };
     }
   }
-  
-  @Override
-  @SuppressWarnings("unchecked")
-  public NamedList getStatistics() {
-    if (importer == null)
-      return super.getStatistics();
 
-    DocBuilder.Statistics cumulative = importer.cumulativeStatistics;
-    SimpleOrderedMap result = new SimpleOrderedMap();
-
-    result.add("Status", importer.getStatus().toString());
+  @Override
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    super.initializeMetrics(manager, registryName, scope);
+    metrics = new MetricsMap((detailed, map) -> {
+      if (importer != null) {
+        DocBuilder.Statistics cumulative = importer.cumulativeStatistics;
 
-    if (importer.docBuilder != null) {
-      DocBuilder.Statistics running = importer.docBuilder.importStatistics;
-      result.add("Documents Processed", running.docCount);
-      result.add("Requests made to DataSource", running.queryCount);
-      result.add("Rows Fetched", running.rowsCount);
-      result.add("Documents Deleted", running.deletedDocCount);
-      result.add("Documents Skipped", running.skipDocCount);
-    }
+        map.put("Status", importer.getStatus().toString());
 
-    result.add(DataImporter.MSG.TOTAL_DOC_PROCESSED, cumulative.docCount);
-    result.add(DataImporter.MSG.TOTAL_QUERIES_EXECUTED, cumulative.queryCount);
-    result.add(DataImporter.MSG.TOTAL_ROWS_EXECUTED, cumulative.rowsCount);
-    result.add(DataImporter.MSG.TOTAL_DOCS_DELETED, cumulative.deletedDocCount);
-    result.add(DataImporter.MSG.TOTAL_DOCS_SKIPPED, cumulative.skipDocCount);
+        if (importer.docBuilder != null) {
+          DocBuilder.Statistics running = importer.docBuilder.importStatistics;
+          map.put("Documents Processed", running.docCount);
+          map.put("Requests made to DataSource", running.queryCount);
+          map.put("Rows Fetched", running.rowsCount);
+          map.put("Documents Deleted", running.deletedDocCount);
+          map.put("Documents Skipped", running.skipDocCount);
+        }
 
-    NamedList requestStatistics = super.getStatistics();
-    if (requestStatistics != null) {
-      for (int i = 0; i < requestStatistics.size(); i++) {
-        result.add(requestStatistics.getName(i), requestStatistics.getVal(i));
+        map.put(DataImporter.MSG.TOTAL_DOC_PROCESSED, cumulative.docCount);
+        map.put(DataImporter.MSG.TOTAL_QUERIES_EXECUTED, cumulative.queryCount);
+        map.put(DataImporter.MSG.TOTAL_ROWS_EXECUTED, cumulative.rowsCount);
+        map.put(DataImporter.MSG.TOTAL_DOCS_DELETED, cumulative.deletedDocCount);
+        map.put(DataImporter.MSG.TOTAL_DOCS_SKIPPED, cumulative.skipDocCount);
       }
-    }
-
-    return result;
+    });
+    manager.registerGauge(this, registryName, metrics, true, "importer", getCategory().toString(), scope);
   }
 
   // //////////////////////SolrInfoMBeans methods //////////////////////

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
index 2f5e9b0..a49b4f6 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
@@ -288,8 +288,7 @@ public class DataImporter {
     if (propertyWriterTags.isEmpty()) {
       boolean zookeeper = false;
       if (this.core != null
-          && this.core.getCoreDescriptor().getCoreContainer()
-              .isZooKeeperAware()) {
+          && this.core.getCoreContainer().isZooKeeperAware()) {
         zookeeper = true;
       }
       pw = new PropertyWriter(zookeeper ? "ZKPropertiesWriter"

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
index a3d4756..f6a62aa 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
@@ -314,7 +314,7 @@ public class DocBuilder {
   }
 
   void handleError(String message, Exception e) {
-    if (!dataImporter.getCore().getCoreDescriptor().getCoreContainer().isZooKeeperAware()) {
+    if (!dataImporter.getCore().getCoreContainer().isZooKeeperAware()) {
       writer.rollback();
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ZKPropertiesWriter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ZKPropertiesWriter.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ZKPropertiesWriter.java
index 2d54872..64a776c 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ZKPropertiesWriter.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ZKPropertiesWriter.java
@@ -44,7 +44,7 @@ public class ZKPropertiesWriter extends SimplePropertiesWriter {
   @Override
   public void init(DataImporter dataImporter, Map<String, String> params) {
     super.init(dataImporter, params);    
-    zkClient = dataImporter.getCore().getCoreDescriptor().getCoreContainer().getZkController().getZkClient();
+    zkClient = dataImporter.getCore().getCoreContainer().getZkController().getZkClient();
   }
   
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java
index 92e233c..ff31c01 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java
@@ -90,8 +90,8 @@ public class MinMaxNormalizer extends Normalizer {
   @Override
   public LinkedHashMap<String,Object> paramsToMap() {
     final LinkedHashMap<String,Object> params = new LinkedHashMap<>(2, 1.0f);
-    params.put("min", min);
-    params.put("max", max);
+    params.put("min", '"'+Float.toString(min)+'"');
+    params.put("max", '"'+Float.toString(max)+'"');
     return params;
   }
 

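A hedged illustration of the quoting change above (the SOLR-10421 params-persistence fix
noted in CHANGES.txt): the float parameters are now serialized as quoted strings so they
round-trip through model-store persistence. The setter names are assumptions inferred from
the getMin()/getMax() pair used in the tests further below.

    import org.apache.solr.ltr.norm.MinMaxNormalizer;

    public class NormalizerParamsExample {
      public static void main(String[] args) {
        MinMaxNormalizer mmn = new MinMaxNormalizer();
        mmn.setMin(0.0f);  // setter names assumed, mirroring the getters
        mmn.setMax(1.0f);
        // With the change above this prints {min="0.0", max="1.0"} instead of
        // {min=0.0, max=1.0}, i.e. the params persist as quoted strings.
        System.out.println(mmn.paramsToMap());
      }
    }
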
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java
index 7ab525c..57df7b4 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java
@@ -82,8 +82,8 @@ public class StandardNormalizer extends Normalizer {
   @Override
   public LinkedHashMap<String,Object> paramsToMap() {
     final LinkedHashMap<String,Object> params = new LinkedHashMap<>(2, 1.0f);
-    params.put("avg", avg);
-    params.put("std", std);
+    params.put("avg", '"'+Float.toString(avg)+'"');
+    params.put("std", '"'+Float.toString(std)+'"');
     return params;
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java
index 9585a7f..beb5ddf 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java
@@ -17,7 +17,6 @@
 package org.apache.solr.ltr.response.transform;
 
 import java.io.IOException;
-import java.lang.invoke.MethodHandles;
 import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
@@ -47,8 +46,6 @@ import org.apache.solr.response.transform.DocTransformer;
 import org.apache.solr.response.transform.TransformerFactory;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.util.SolrPluginUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * This transformer will take care to generate and append in the response the
@@ -65,8 +62,6 @@ import org.slf4j.LoggerFactory;
 
 public class LTRFeatureLoggerTransformerFactory extends TransformerFactory {
 
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
   // used inside fl to specify the format (dense|sparse) of the extracted features
   private static final String FV_FORMAT = "format";
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java
index 2b4d570..c5db963 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java
@@ -17,7 +17,6 @@
 package org.apache.solr.ltr.search;
 
 import java.io.IOException;
-import java.lang.invoke.MethodHandles;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
@@ -46,8 +45,6 @@ import org.apache.solr.search.QParserPlugin;
 import org.apache.solr.search.RankQuery;
 import org.apache.solr.search.SyntaxError;
 import org.apache.solr.util.SolrPluginUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * Plug into solr a rerank model.
@@ -60,8 +57,6 @@ public class LTRQParserPlugin extends QParserPlugin implements ResourceLoaderAwa
   public static final String NAME = "ltr";
   private static Query defaultQuery = new MatchAllDocsQuery();
 
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
   // params for setting custom external info that features can use, like query
   // intent
   static final String EXTERNAL_FEATURE_INFO = "efi.";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
index 055b3bc..794e393 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
@@ -40,6 +40,7 @@ public class TestMinMaxNormalizer {
     final MinMaxNormalizer mmn = (MinMaxNormalizer)n;
     assertEquals(mmn.getMin(), expectedMin, 0.0);
     assertEquals(mmn.getMax(), expectedMax, 0.0);
+    assertEquals("{min=\""+expectedMin+"\", max=\""+expectedMax+"\"}", mmn.paramsToMap().toString());
     return n;
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
index 10fa972..1794686 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
@@ -40,6 +40,7 @@ public class TestStandardNormalizer {
     final StandardNormalizer sn = (StandardNormalizer)n;
     assertEquals(sn.getAvg(), expectedAvg, 0.0);
     assertEquals(sn.getStd(), expectedStd, 0.0);
+    assertEquals("{avg=\""+expectedAvg+"\", std=\""+expectedStd+"\"}", sn.paramsToMap().toString());
     return n;
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/contrib/velocity/src/java/org/apache/solr/response/SolrVelocityResourceLoader.java
----------------------------------------------------------------------
diff --git a/solr/contrib/velocity/src/java/org/apache/solr/response/SolrVelocityResourceLoader.java b/solr/contrib/velocity/src/java/org/apache/solr/response/SolrVelocityResourceLoader.java
index a659d0c..c83a5a7 100644
--- a/solr/contrib/velocity/src/java/org/apache/solr/response/SolrVelocityResourceLoader.java
+++ b/solr/contrib/velocity/src/java/org/apache/solr/response/SolrVelocityResourceLoader.java
@@ -16,15 +16,15 @@
  */
 package org.apache.solr.response;
 
-import org.apache.velocity.runtime.resource.loader.ResourceLoader;
-import org.apache.velocity.runtime.resource.Resource;
-import org.apache.velocity.exception.ResourceNotFoundException;
-import org.apache.commons.collections.ExtendedProperties;
-import org.apache.solr.core.SolrResourceLoader;
-
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.commons.collections.ExtendedProperties;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.velocity.exception.ResourceNotFoundException;
+import org.apache.velocity.runtime.resource.Resource;
+import org.apache.velocity.runtime.resource.loader.ResourceLoader;
+
 /**
   * Velocity resource loader wrapper around Solr resource loader
   */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
index 8de5fc9..948452e 100644
--- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
+++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
@@ -89,7 +89,7 @@ public class EmbeddedSolrServer extends SolrClient {
    * Create an EmbeddedSolrServer wrapping a particular SolrCore
    */
   public EmbeddedSolrServer(SolrCore core) {
-    this(core.getCoreDescriptor().getCoreContainer(), core.getName());
+    this(core.getCoreContainer(), core.getName());
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
index c62efa6..ac09621 100644
--- a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
+++ b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
@@ -68,7 +68,7 @@ public class CloudUtil {
           
           if (thisCnn != null && thisCnn.equals(cnn)
               && !thisBaseUrl.equals(baseUrl)) {
-            if (cc.getCoreNames().contains(desc.getName())) {
+            if (cc.getLoadedCoreNames().contains(desc.getName())) {
               cc.unload(desc.getName());
             }
             

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java b/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
index e7ac5e5..6c28cc6 100644
--- a/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
@@ -86,10 +86,9 @@ public class DistributedQueue {
    */
   private final Condition changed = updateLock.newCondition();
 
-  /**
-   * If non-null, the last watcher to listen for child changes.  If null, the in-memory contents are dirty.
-   */
-  private ChildWatcher lastWatcher = null;
+  private boolean isDirty = true;
+
+  private int watcherCount = 0;
 
   public DistributedQueue(SolrZkClient zookeeper, String dir) {
     this(zookeeper, dir, new Overseer.Stats());
@@ -238,10 +237,10 @@ public class DistributedQueue {
     try {
       while (true) {
         try {
-          // We don't need to explicitly set isDirty here; if there is a watcher, it will
-          // see the update and set the bit itself; if there is no watcher we can defer
-          // the update anyway.
+          // Explicitly set isDirty here so that synchronous same-thread calls behave as expected.
+          // This will get set again when the watcher actually fires, but that's ok.
           zookeeper.create(dir + "/" + PREFIX, data, CreateMode.PERSISTENT_SEQUENTIAL, true);
+          isDirty = true;
           return;
         } catch (KeeperException.NoNodeException e) {
           try {
@@ -269,15 +268,25 @@ public class DistributedQueue {
   private String firstChild(boolean remove) throws KeeperException, InterruptedException {
     updateLock.lockInterruptibly();
     try {
-      // If we're not in a dirty state, and we have in-memory children, return from in-memory.
-      if (lastWatcher != null && !knownChildren.isEmpty()) {
-        return remove ? knownChildren.pollFirst() : knownChildren.first();
+      if (!isDirty) {
+        // If we're not in a dirty state...
+        if (!knownChildren.isEmpty()) {
+          // and we have in-memory children, return from in-memory.
+          return remove ? knownChildren.pollFirst() : knownChildren.first();
+        } else {
+          // otherwise there's nothing to return
+          return null;
+        }
       }
 
-      // Try to fetch an updated list of children from ZK.
-      ChildWatcher newWatcher = new ChildWatcher();
+      // Dirty, try to fetch an updated list of children from ZK.
+      // Only set a new watcher if there isn't already a watcher.
+      ChildWatcher newWatcher = (watcherCount == 0) ? new ChildWatcher() : null;
       knownChildren = fetchZkChildren(newWatcher);
-      lastWatcher = newWatcher; // only set after fetchZkChildren returns successfully
+      if (newWatcher != null) {
+        watcherCount++; // watcher was successfully set
+      }
+      isDirty = false;
       if (knownChildren.isEmpty()) {
         return null;
       }
@@ -422,16 +431,25 @@ public class DistributedQueue {
     }
   }
 
-  @VisibleForTesting boolean hasWatcher() throws InterruptedException {
+  @VisibleForTesting int watcherCount() throws InterruptedException {
     updateLock.lockInterruptibly();
     try {
-      return lastWatcher != null;
+      return watcherCount;
     } finally {
       updateLock.unlock();
     }
   }
 
-  private class ChildWatcher implements Watcher {
+  @VisibleForTesting boolean isDirty() throws InterruptedException {
+    updateLock.lockInterruptibly();
+    try {
+      return isDirty;
+    } finally {
+      updateLock.unlock();
+    }
+  }
+
+  @VisibleForTesting class ChildWatcher implements Watcher {
 
     @Override
     public void process(WatchedEvent event) {
@@ -441,10 +459,8 @@ public class DistributedQueue {
       }
       updateLock.lock();
       try {
-        // this watcher is automatically cleared when fired
-        if (lastWatcher == this) {
-          lastWatcher = null;
-        }
+        isDirty = true;
+        watcherCount--;
         // optimistically signal any waiters that the queue may not be empty now, so they can wake up and retry
         changed.signalAll();
       } finally {

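A hedged, test-style fragment showing the new bookkeeping above: offer() now marks the queue
dirty synchronously, and the next read refreshes from ZK, clears the flag, and installs at
most one child watcher. The offer()/peek() signatures and the two-argument constructor are
assumed from the rest of this class; isDirty() and watcherCount() are the @VisibleForTesting
accessors added in this hunk.

    DistributedQueue dq = new DistributedQueue(zkClient, "/overseer/test-queue");
    dq.offer("x".getBytes(StandardCharsets.UTF_8)); // sets isDirty in the calling thread
    assert dq.isDirty();                            // dirty until the next read refreshes from ZK
    dq.peek();                                      // fetchZkChildren(...) runs, one watcher is set
    assert !dq.isDirty() && dq.watcherCount() == 1; // subsequent reads reuse the single watcher
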
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
index 6e8dbda..bdbeca9 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
@@ -292,7 +292,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
         if (cc.isShutDown()) {
           return;
         } else {
-          throw new SolrException(ErrorCode.SERVER_ERROR, "SolrCore not found:" + coreName + " in " + cc.getCoreNames());
+          throw new SolrException(ErrorCode.SERVER_ERROR, "SolrCore not found:" + coreName + " in " + cc.getLoadedCoreNames());
         }
       }
       MDCLoggingContext.setCore(core);
@@ -332,7 +332,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
           if (!zkController.getCoreContainer().isShutDown())  {
             cancelElection();
             throw new SolrException(ErrorCode.SERVER_ERROR,
-                "SolrCore not found:" + coreName + " in " + cc.getCoreNames());
+                "SolrCore not found:" + coreName + " in " + cc.getLoadedCoreNames());
           } else  {
             return;
           }
@@ -402,7 +402,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
             RefCounted<SolrIndexSearcher> searchHolder = core.getNewestSearcher(false);
             SolrIndexSearcher searcher = searchHolder.get();
             try {
-              log.debug(core.getCoreDescriptor().getCoreContainer().getZkController().getNodeName() + " synched "
+              log.debug(core.getCoreContainer().getZkController().getNodeName() + " synched "
                   + searcher.search(new MatchAllDocsQuery(), 1).totalHits);
             } finally {
               searchHolder.decref();
@@ -462,7 +462,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
           try (SolrCore core = cc.getCore(coreName)) {
             
             if (core == null) {
-              log.debug("SolrCore not found:" + coreName + " in " + cc.getCoreNames());
+              log.debug("SolrCore not found:" + coreName + " in " + cc.getLoadedCoreNames());
               return;
             }
             

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java
new file mode 100644
index 0000000..545989e
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java
@@ -0,0 +1,193 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.*;
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+
+public class MoveReplicaCmd implements Cmd {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private final OverseerCollectionMessageHandler ocmh;
+
+  public MoveReplicaCmd(OverseerCollectionMessageHandler ocmh) {
+    this.ocmh = ocmh;
+  }
+
+  @Override
+  public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
+    moveReplica(ocmh.zkStateReader.getClusterState(), message, results);
+  }
+
+  private void moveReplica(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception {
+    log.info("moveReplica() : {}", Utils.toJSONString(message));
+    ocmh.checkRequired(message, COLLECTION_PROP, "targetNode");
+    String collection = message.getStr(COLLECTION_PROP);
+    String targetNode = message.getStr("targetNode");
+
+    String async = message.getStr(ASYNC);
+
+    DocCollection coll = clusterState.getCollection(collection);
+    if (coll == null) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Collection: " + collection + " does not exist");
+    }
+    Replica replica = null;
+    if (message.containsKey(REPLICA_PROP)) {
+      String replicaName = message.getStr(REPLICA_PROP);
+      replica = coll.getReplica(replicaName);
+      if (replica == null) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+            "Collection: " + collection + " replica: " + replicaName + " does not exist");
+      }
+    } else {
+      ocmh.checkRequired(message, SHARD_ID_PROP, "fromNode");
+      String fromNode = message.getStr("fromNode");
+      String shardId = message.getStr(SHARD_ID_PROP);
+      Slice slice = clusterState.getCollection(collection).getSlice(shardId);
+      List<Replica> sliceReplicas = new ArrayList<>(slice.getReplicas());
+      Collections.shuffle(sliceReplicas, RANDOM);
+      // Iterate the shuffled copy (not the original list) so that a random
+      // replica located on fromNode is selected.
+      for (Replica r : sliceReplicas) {
+        if (r.getNodeName().equals(fromNode)) {
+          replica = r;
+          break;
+        }
+      }
+      if (replica == null) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+            "Collection: " + collection + " node: " + fromNode + " do not have any replica belong to shard: " + shardId);
+      }
+    }
+
+    log.info("Replica will be moved {}", replica);
+    Slice slice = null;
+    for (Slice s : coll.getSlices()) {
+      if (s.getReplicas().contains(replica)) {
+        slice = s;
+      }
+    }
+    assert slice != null;
+    Object dataDir = replica.get("dataDir");
+    if (dataDir != null && dataDir.toString().startsWith("hdfs:/")) {
+      moveHdfsReplica(clusterState, results, dataDir.toString(), targetNode, async, coll, replica, slice);
+    } else {
+      moveNormalReplica(clusterState, results, targetNode, async, coll, replica, slice);
+    }
+  }
+
+  private void moveHdfsReplica(ClusterState clusterState, NamedList results, String dataDir, String targetNode, String async,
+                                 DocCollection coll, Replica replica, Slice slice) throws Exception {
+    String newCoreName = Assign.buildCoreName(coll, slice.getName());
+
+    ZkNodeProps removeReplicasProps = new ZkNodeProps(
+        COLLECTION_PROP, coll.getName(),
+        SHARD_ID_PROP, slice.getName(),
+        REPLICA_PROP, replica.getName()
+        );
+    removeReplicasProps.getProperties().put(CoreAdminParams.DELETE_DATA_DIR, false);
+    removeReplicasProps.getProperties().put(CoreAdminParams.DELETE_INDEX, false);
+    if(async!=null) removeReplicasProps.getProperties().put(ASYNC, async);
+    NamedList deleteResult = new NamedList();
+    ocmh.deleteReplica(clusterState, removeReplicasProps, deleteResult, null);
+    if (deleteResult.get("failure") != null) {
+      String errorString = String.format(Locale.ROOT, "Failed to cleanup replica collection=%s shard=%s name=%s",
+          coll.getName(), slice.getName(), replica.getName());
+      log.warn(errorString);
+      results.add("failure", errorString + ", because of : " + deleteResult.get("failure"));
+      return;
+    }
+
+    ZkNodeProps addReplicasProps = new ZkNodeProps(
+        COLLECTION_PROP, coll.getName(),
+        SHARD_ID_PROP, slice.getName(),
+        CoreAdminParams.NODE, targetNode,
+        CoreAdminParams.NAME, newCoreName,
+        CoreAdminParams.DATA_DIR, dataDir);
+    if(async!=null) addReplicasProps.getProperties().put(ASYNC, async);
+    NamedList addResult = new NamedList();
+    ocmh.addReplica(clusterState, addReplicasProps, addResult, null);
+    if (addResult.get("failure") != null) {
+      String errorString = String.format(Locale.ROOT, "Failed to create replica for collection=%s shard=%s" +
+          " on node=%s", coll.getName(), slice.getName(), targetNode);
+      log.warn(errorString);
+      results.add("failure", errorString);
+      return;
+    } else {
+      String successString = String.format(Locale.ROOT, "MOVEREPLICA action completed successfully, moved replica=%s at node=%s " +
+          "to replica=%s at node=%s", replica.getCoreName(), replica.getNodeName(), newCoreName, targetNode);
+      results.add("success", successString);
+    }
+  }
+
+  private void moveNormalReplica(ClusterState clusterState, NamedList results, String targetNode, String async,
+                                 DocCollection coll, Replica replica, Slice slice) throws Exception {
+    String newCoreName = Assign.buildCoreName(coll, slice.getName());
+    ZkNodeProps addReplicasProps = new ZkNodeProps(
+        COLLECTION_PROP, coll.getName(),
+        SHARD_ID_PROP, slice.getName(),
+        CoreAdminParams.NODE, targetNode,
+        CoreAdminParams.NAME, newCoreName);
+    if(async!=null) addReplicasProps.getProperties().put(ASYNC, async);
+    NamedList addResult = new NamedList();
+    ocmh.addReplica(clusterState, addReplicasProps, addResult, null);
+    if (addResult.get("failure") != null) {
+      String errorString = String.format(Locale.ROOT, "Failed to create replica for collection=%s shard=%s" +
+          " on node=%s", coll.getName(), slice.getName(), targetNode);
+      log.warn(errorString);
+      results.add("failure", errorString);
+      return;
+    }
+
+    ZkNodeProps removeReplicasProps = new ZkNodeProps(
+        COLLECTION_PROP, coll.getName(),
+        SHARD_ID_PROP, slice.getName(),
+        REPLICA_PROP, replica.getName());
+    if(async!=null) removeReplicasProps.getProperties().put(ASYNC, async);
+    NamedList deleteResult = new NamedList();
+    ocmh.deleteReplica(clusterState, removeReplicasProps, deleteResult, null);
+    if (deleteResult.get("failure") != null) {
+      String errorString = String.format(Locale.ROOT, "Failed to cleanup replica collection=%s shard=%s name=%s",
+          coll.getName(), slice.getName(), replica.getName());
+      log.warn(errorString);
+      results.add("failure", errorString + ", because of : " + deleteResult.get("failure"));
+    } else {
+      String successString = String.format(Locale.ROOT, "MOVEREPLICA action completed successfully, moved replica=%s at node=%s " +
+          "to replica=%s at node=%s", replica.getCoreName(), replica.getNodeName(), newCoreName, targetNode);
+      results.add("success", successString);
+    }
+  }
+}

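To show how the new command above is reached from the Collections API, a hedged SolrJ sketch
using a generic request follows. The parameter names (collection, targetNode, and either
replica or shard plus fromNode, with optional async) are taken from MoveReplicaCmd above;
the use of GenericSolrRequest is an assumption, since a typed CollectionAdminRequest helper
for MOVEREPLICA may not exist yet at this commit.

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.request.GenericSolrRequest;
    import org.apache.solr.common.params.ModifiableSolrParams;

    public class MoveReplicaExample {
      public static void main(String[] args) throws Exception {
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("action", "MOVEREPLICA");
        params.set("collection", "coll1");
        // Either name a replica directly with params.set("replica", "core_node3"), or,
        // as below, let the command pick a replica of the shard living on fromNode:
        params.set("shard", "shard1");
        params.set("fromNode", "host1:8983_solr");
        params.set("targetNode", "host2:8983_solr");
        SolrRequest<?> req = new GenericSolrRequest(SolrRequest.METHOD.POST, "/admin/collections", params);
        try (SolrClient client = new HttpSolrClient.Builder("http://host1:8983/solr").build()) {
          System.out.println(client.request(req));  // NamedList containing "success" or "failure"
        }
      }
    }
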
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/cloud/Overseer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
index 4fe0fdb..2abc0f9 100644
--- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
@@ -384,7 +384,7 @@ public class Overseer implements Closeable {
             }
             break;
           case DOWNNODE:
-            return new NodeMutator(getZkStateReader()).downNode(clusterState, message);
+            return new NodeMutator().downNode(clusterState, message);
           default:
             throw new RuntimeException("unknown operation:" + operation + " contents:" + message.getProperties());
         }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
index 162eb4e..65b23cf 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
@@ -215,6 +215,7 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler
         .put(DELETESHARD, new DeleteShardCmd(this))
         .put(DELETEREPLICA, new DeleteReplicaCmd(this))
         .put(ADDREPLICA, new AddReplicaCmd(this))
+        .put(MOVEREPLICA, new MoveReplicaCmd(this))
         .build()
     ;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
index 2cbc394..496d082 100644
--- a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
+++ b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
@@ -233,7 +233,7 @@ public class RecoveryStrategy extends Thread implements Closeable {
         SolrIndexSearcher searcher = searchHolder.get();
         Directory dir = core.getDirectoryFactory().get(core.getIndexDir(), DirContext.META_DATA, null);
         try {
-          LOG.debug(core.getCoreDescriptor().getCoreContainer()
+          LOG.debug(core.getCoreContainer()
               .getZkController().getNodeName()
               + " replicated "
               + searcher.search(new MatchAllDocsQuery(), 1).totalHits
@@ -641,7 +641,7 @@ public class RecoveryStrategy extends Thread implements Closeable {
       SolrIndexSearcher searcher = searchHolder.get();
       try {
         final int totalHits = searcher.search(new MatchAllDocsQuery(), 1).totalHits;
-        final String nodeName = core.getCoreDescriptor().getCoreContainer().getZkController().getNodeName();
+        final String nodeName = core.getCoreContainer().getZkController().getNodeName();
         LOG.debug("[{}] {} [{} total hits]", nodeName, op, totalHits);
       } finally {
         searchHolder.decref();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
index d7fded9..817b371 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
@@ -55,7 +55,7 @@ public class ReplicateFromLeader {
         if (cc.isShutDown()) {
           return;
         } else {
-          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "SolrCore not found:" + coreName + " in " + cc.getCoreNames());
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "SolrCore not found:" + coreName + " in " + cc.getLoadedCoreNames());
         }
       }
       SolrConfig.UpdateHandlerInfo uinfo = core.getSolrConfig().getUpdateHandlerInfo();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/cloud/ZkController.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index 9a2a76f..add10e2 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -1169,7 +1169,7 @@ public class ZkController {
         MDCLoggingContext.setCore(core);
       }
     } else {
-      MDCLoggingContext.setCoreDescriptor(cd);
+      MDCLoggingContext.setCoreDescriptor(cc, cd);
     }
     try {
       String collection = cd.getCloudDescriptor().getCollectionName();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java b/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java
index 0036fe1..55fd3ef 100644
--- a/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java
+++ b/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java
@@ -19,7 +19,6 @@ package org.apache.solr.cloud.overseer;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -38,44 +37,44 @@ public class NodeMutator {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  public NodeMutator(ZkStateReader zkStateReader) {
-
-  }
-
   public List<ZkWriteCommand> downNode(ClusterState clusterState, ZkNodeProps message) {
-    List<ZkWriteCommand> zkWriteCommands = new ArrayList<ZkWriteCommand>();
+    List<ZkWriteCommand> zkWriteCommands = new ArrayList<>();
     String nodeName = message.getStr(ZkStateReader.NODE_NAME_PROP);
 
     log.debug("DownNode state invoked for node: " + nodeName);
 
     Map<String, DocCollection> collections = clusterState.getCollectionsMap();
     for (Map.Entry<String, DocCollection> entry : collections.entrySet()) {
+      String collection = entry.getKey();
       DocCollection docCollection = entry.getValue();
+
       Map<String,Slice> slicesCopy = new LinkedHashMap<>(docCollection.getSlicesMap());
 
-      for (Entry<String,Slice> sliceEntry : slicesCopy.entrySet()) {
-        Slice slice = docCollection.getSlice(sliceEntry.getKey());
-        Map<String,Replica> newReplicas = new HashMap<String,Replica>();
+      boolean needToUpdateCollection = false;
+      for (Entry<String, Slice> sliceEntry : slicesCopy.entrySet()) {
+        Slice slice = sliceEntry.getValue();
+        Map<String, Replica> newReplicas = slice.getReplicasCopy();
 
         Collection<Replica> replicas = slice.getReplicas();
         for (Replica replica : replicas) {
-          Map<String,Object> props = replica.shallowCopy();
           String rNodeName = replica.getNodeName();
           if (rNodeName.equals(nodeName)) {
             log.debug("Update replica state for " + replica + " to " + Replica.State.DOWN.toString());
+            Map<String, Object> props = replica.shallowCopy();
             props.put(ZkStateReader.STATE_PROP, Replica.State.DOWN.toString());
+            Replica newReplica = new Replica(replica.getName(), props);
+            newReplicas.put(replica.getName(), newReplica);
+            needToUpdateCollection = true;
           }
-
-          Replica newReplica = new Replica(replica.getName(), props);
-          newReplicas.put(replica.getName(), newReplica);
         }
 
         Slice newSlice = new Slice(slice.getName(), newReplicas, slice.shallowCopy());
         slicesCopy.put(slice.getName(), newSlice);
-
       }
 
-      zkWriteCommands.add(new ZkWriteCommand(entry.getKey(), docCollection.copyWithSlices(slicesCopy)));
+      if (needToUpdateCollection) {
+        zkWriteCommands.add(new ZkWriteCommand(collection, docCollection.copyWithSlices(slicesCopy)));
+      }
     }
 
     return zkWriteCommands;

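A hedged sketch of how the slimmed-down mutator is driven (the clusterState value and the node name here are placeholders, not from the patch). Note that collections with no replica on the downed node now yield no write command at all:

    NodeMutator mutator = new NodeMutator(); // the unused ZkStateReader constructor is gone
    ZkNodeProps message = new ZkNodeProps(ZkStateReader.NODE_NAME_PROP, "127.0.0.1:8983_solr");
    List<ZkWriteCommand> commands = mutator.downNode(clusterState, message);
    // only collections that hosted a replica on that node appear in 'commands'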
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/cloud/overseer/ZkWriteCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/ZkWriteCommand.java b/solr/core/src/java/org/apache/solr/cloud/overseer/ZkWriteCommand.java
index 1697522..d464863 100644
--- a/solr/core/src/java/org/apache/solr/cloud/overseer/ZkWriteCommand.java
+++ b/solr/core/src/java/org/apache/solr/cloud/overseer/ZkWriteCommand.java
@@ -41,5 +41,10 @@ public class ZkWriteCommand {
   public static ZkWriteCommand noop() {
     return new ZkWriteCommand();
   }
+
+  @Override
+  public String toString() {
+    return getClass().getSimpleName() + ": " + (noop ? "no-op" : name + "=" + collection);
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/cloud/rule/ImplicitSnitch.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/rule/ImplicitSnitch.java b/solr/core/src/java/org/apache/solr/cloud/rule/ImplicitSnitch.java
index 00e27c3..a4e998d 100644
--- a/solr/core/src/java/org/apache/solr/cloud/rule/ImplicitSnitch.java
+++ b/solr/core/src/java/org/apache/solr/cloud/rule/ImplicitSnitch.java
@@ -45,7 +45,7 @@ public class ImplicitSnitch implements CoreAdminHandler.Invocable {
     Map<String, Object> result = new HashMap<>();
     CoreContainer cc = (CoreContainer) req.getContext().get(CoreContainer.class.getName());
     if (req.getParams().getInt(CORES, -1) == 1) {
-      result.put(CORES, cc.getCoreNames().size());
+      result.put(CORES, cc.getLoadedCoreNames().size());
     }
     if (req.getParams().getInt(DISK, -1) == 1) {
       try {


[02/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ConversionEvaluatorsTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ConversionEvaluatorsTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ConversionEvaluatorsTest.java
new file mode 100644
index 0000000..94124ad
--- /dev/null
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/ConversionEvaluatorsTest.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.stream.eval;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.collections.map.HashedMap;
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.eval.ConversionEvaluator;
+import org.apache.solr.client.solrj.io.eval.RawValueEvaluator;
+import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
+import org.apache.solr.client.solrj.io.stream.StreamContext;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+import org.junit.Test;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertTrue;
+
+/**
+ * Test ConversionEvaluators
+ */
+public class ConversionEvaluatorsTest {
+
+
+  StreamFactory factory;
+  Map<String, Object> values;
+
+  public ConversionEvaluatorsTest() {
+    super();
+
+    factory = new StreamFactory();
+    factory.withFunctionName("convert", ConversionEvaluator.class).withFunctionName("raw", RawValueEvaluator.class);
+
+    values = new HashedMap();
+  }
+
+  @Test
+  public void testInvalidExpression() throws Exception {
+
+    StreamEvaluator evaluator;
+
+    try {
+      evaluator = factory.constructEvaluator("convert(inches)");
+      StreamContext streamContext = new StreamContext();
+      evaluator.setStreamContext(streamContext);
+      assertTrue(false);
+    } catch (IOException e) {
+      assertTrue(e.getCause().getCause().getMessage().contains("Invalid expression convert(inches) - expecting 3 value but found 1"));
+    }
+
+    try {
+      evaluator = factory.constructEvaluator("convert(inches, yards, 3)");
+      StreamContext streamContext = new StreamContext();
+      evaluator.setStreamContext(streamContext);
+      Tuple tuple = new Tuple(new HashMap());
+      evaluator.evaluate(tuple);
+      assertTrue(false);
+    } catch (IOException e) {
+      assertTrue(e.getCause().getCause().getMessage().contains("No conversion available from INCHES to YARDS"));
+    }
+  }
+
+  @Test
+  public void testInches() throws Exception {
+    testFunction("convert(inches, centimeters, 2)", (double)(2*2.54));
+    testFunction("convert(inches, meters, 2)", (double)(2*0.0254));
+    testFunction("convert(inches, millimeters, 2)", (double)(2*25.40));
+  }
+
+  @Test
+  public void testYards() throws Exception {
+    testFunction("convert(yards, meters, 2)", (double)(2*.91));
+    testFunction("convert(yards, kilometers, 2)", (double)(2*.00091));
+  }
+
+  @Test
+  public void testMiles() throws Exception {
+    testFunction("convert(miles, kilometers, 2)", (double)(2*1.61));
+  }
+
+  @Test
+  public void testMillimeters() throws Exception {
+    testFunction("convert(millimeters, inches, 2)", (double)(2*.039));
+  }
+
+  @Test
+  public void testCentimeters() throws Exception {
+    testFunction("convert(centimeters, inches, 2)", (double)(2*.39));
+  }
+
+  @Test
+  public void testMeters() throws Exception {
+    testFunction("convert(meters, feet, 2)", (double)(2*3.28));
+  }
+
+  @Test
+  public void testKiloMeters() throws Exception {
+    testFunction("convert(kilometers, feet, 2)", (double)(2*3280.8));
+    testFunction("convert(kilometers, miles, 2)", (double)(2*.62));
+  }
+
+  public void testFunction(String expression, Number expected) throws Exception {
+    StreamEvaluator evaluator = factory.constructEvaluator(expression);
+    StreamContext streamContext = new StreamContext();
+    evaluator.setStreamContext(streamContext);
+    Object result = evaluator.evaluate(new Tuple(values));
+    assertTrue(result instanceof Number);
+    assertEquals(expected, result);
+  }
+
+
+}

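Mirroring the test plumbing above, a minimal sketch of evaluating a conversion outside the test harness (exception handling elided; the expected value comes from the miles-to-kilometers case in the tests):

    StreamFactory factory = new StreamFactory()
        .withFunctionName("convert", ConversionEvaluator.class);
    StreamEvaluator eval = factory.constructEvaluator("convert(miles, kilometers, 2)");
    eval.setStreamContext(new StreamContext());
    Object km = eval.evaluate(new Tuple(new HashMap())); // 3.22, i.e. 2 * 1.61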
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/TemporalEvaluatorsTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/TemporalEvaluatorsTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/TemporalEvaluatorsTest.java
new file mode 100644
index 0000000..8205cea
--- /dev/null
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/eval/TemporalEvaluatorsTest.java
@@ -0,0 +1,305 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.stream.eval;
+
+import java.io.IOException;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.MonthDay;
+import java.time.YearMonth;
+import java.time.ZoneOffset;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.Locale;
+import java.util.Map;
+import java.util.TimeZone;
+
+import org.apache.commons.collections.map.HashedMap;
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.eval.TemporalEvaluatorDay;
+import org.apache.solr.client.solrj.io.eval.TemporalEvaluatorDayOfQuarter;
+import org.apache.solr.client.solrj.io.eval.TemporalEvaluatorDayOfYear;
+import org.apache.solr.client.solrj.io.eval.TemporalEvaluatorEpoch;
+import org.apache.solr.client.solrj.io.eval.TemporalEvaluatorHour;
+import org.apache.solr.client.solrj.io.eval.TemporalEvaluatorMinute;
+import org.apache.solr.client.solrj.io.eval.TemporalEvaluatorMonth;
+import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
+import org.apache.solr.client.solrj.io.eval.TemporalEvaluatorQuarter;
+import org.apache.solr.client.solrj.io.eval.TemporalEvaluatorSecond;
+import org.apache.solr.client.solrj.io.eval.TemporalEvaluatorWeek;
+import org.apache.solr.client.solrj.io.eval.TemporalEvaluatorYear;
+import org.apache.solr.client.solrj.io.stream.StreamContext;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParser;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+import org.junit.Test;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNull;
+import static junit.framework.Assert.assertTrue;
+
+/**
+ * Tests numeric Date/Time stream evaluators
+ */
+public class TemporalEvaluatorsTest {
+
+
+  StreamFactory factory;
+  Map<String, Object> values;
+
+  public TemporalEvaluatorsTest() {
+    super();
+
+    factory = new StreamFactory();
+
+    factory.withFunctionName(TemporalEvaluatorYear.FUNCTION_NAME,  TemporalEvaluatorYear.class);
+    factory.withFunctionName(TemporalEvaluatorMonth.FUNCTION_NAME, TemporalEvaluatorMonth.class);
+    factory.withFunctionName(TemporalEvaluatorDay.FUNCTION_NAME,   TemporalEvaluatorDay.class);
+    factory.withFunctionName(TemporalEvaluatorDayOfYear.FUNCTION_NAME,   TemporalEvaluatorDayOfYear.class);
+    factory.withFunctionName(TemporalEvaluatorHour.FUNCTION_NAME,   TemporalEvaluatorHour.class);
+    factory.withFunctionName(TemporalEvaluatorMinute.FUNCTION_NAME,   TemporalEvaluatorMinute.class);
+    factory.withFunctionName(TemporalEvaluatorSecond.FUNCTION_NAME,   TemporalEvaluatorSecond.class);
+    factory.withFunctionName(TemporalEvaluatorEpoch.FUNCTION_NAME,   TemporalEvaluatorEpoch.class);
+    factory.withFunctionName(TemporalEvaluatorWeek.FUNCTION_NAME,   TemporalEvaluatorWeek.class);
+    factory.withFunctionName(TemporalEvaluatorQuarter.FUNCTION_NAME,   TemporalEvaluatorQuarter.class);
+    factory.withFunctionName(TemporalEvaluatorDayOfQuarter.FUNCTION_NAME,   TemporalEvaluatorDayOfQuarter.class);
+
+    values = new HashedMap();
+  }
+
+  @Test
+  public void testInvalidExpression() throws Exception {
+
+    StreamEvaluator evaluator;
+
+    try {
+      evaluator = factory.constructEvaluator("week()");
+      StreamContext streamContext = new StreamContext();
+      evaluator.setStreamContext(streamContext);
+      assertTrue(false);
+    } catch (IOException e) {
+      assertTrue(e.getCause().getCause().getMessage().contains("Invalid expression week()"));
+    }
+
+    try {
+      evaluator = factory.constructEvaluator("week(a, b)");
+      StreamContext streamContext = new StreamContext();
+      evaluator.setStreamContext(streamContext);
+      assertTrue(false);
+    } catch (IOException e) {
+      assertTrue(e.getCause().getCause().getMessage().contains("expecting one value but found 2"));
+    }
+
+    try {
+      evaluator = factory.constructEvaluator("Week()");
+      StreamContext streamContext = new StreamContext();
+      evaluator.setStreamContext(streamContext);
+      assertTrue(false);
+    } catch (IOException e) {
+      assertTrue(e.getMessage().contains("Invalid evaluator expression Week() - function 'Week' is unknown"));
+    }
+  }
+
+
+  @Test
+  public void testInvalidValues() throws Exception {
+    StreamEvaluator evaluator = factory.constructEvaluator("year(a)");
+
+
+    try {
+      values.clear();
+      values.put("a", 12);
+      StreamContext streamContext = new StreamContext();
+      evaluator.setStreamContext(streamContext);
+      Object result = evaluator.evaluate(new Tuple(values));
+      assertTrue(false);
+    } catch (IOException e) {
+      assertEquals("Invalid parameter 12 - The parameter must be a string formatted ISO_INSTANT or of type Long,Instant,Date,LocalDateTime or TemporalAccessor.", e.getMessage());
+    }
+
+    try {
+      values.clear();
+      values.put("a", "1995-12-31");
+      StreamContext streamContext = new StreamContext();
+      evaluator.setStreamContext(streamContext);
+      Object result = evaluator.evaluate(new Tuple(values));
+      assertTrue(false);
+    } catch (IOException e) {
+      assertEquals("Invalid parameter 1995-12-31 - The String must be formatted in the ISO_INSTANT date format.", e.getMessage());
+    }
+
+    try {
+      values.clear();
+      values.put("a", "");
+      StreamContext streamContext = new StreamContext();
+      evaluator.setStreamContext(streamContext);
+      Object result = evaluator.evaluate(new Tuple(values));
+      assertTrue(false);
+    } catch (IOException e) {
+      assertEquals("Invalid parameter  - The parameter must be a string formatted ISO_INSTANT or of type Long,Instant,Date,LocalDateTime or TemporalAccessor.", e.getMessage());
+    }
+
+    values.clear();
+    values.put("a", null);
+    assertNull(evaluator.evaluate(new Tuple(values)));
+  }
+
+  @Test
+  public void testAllFunctions() throws Exception {
+
+    //year, month, day, dayofyear, hour, minute, quarter, week, second, epoch
+    testFunction("year(a)", "1995-12-31T23:59:59Z", 1995);
+    testFunction("month(a)","1995-12-31T23:59:59Z", 12);
+    testFunction("day(a)",  "1995-12-31T23:59:59Z", 31);
+    testFunction("dayOfYear(a)",  "1995-12-31T23:59:59Z", 365);
+    testFunction("dayOfQuarter(a)",  "1995-12-31T23:59:59Z", 92);
+    testFunction("hour(a)",   "1995-12-31T23:59:59Z", 23);
+    testFunction("minute(a)", "1995-12-31T23:59:59Z", 59);
+    testFunction("quarter(a)","1995-12-31T23:59:59Z", 4);
+    testFunction("week(a)",   "1995-12-31T23:59:59Z", 52);
+    testFunction("second(a)", "1995-12-31T23:59:58Z", 58);
+    testFunction("epoch(a)",  "1995-12-31T23:59:59Z", 820454399000l);
+
+    testFunction("year(a)", "2017-03-17T10:30:45Z", 2017);
+    testFunction("year('a')", "2017-03-17T10:30:45Z", 2017);
+    testFunction("month(a)","2017-03-17T10:30:45Z", 3);
+    testFunction("day(a)",  "2017-03-17T10:30:45Z", 17);
+    testFunction("day('a')",  "2017-03-17T10:30:45Z", 17);
+    testFunction("dayOfYear(a)",  "2017-03-17T10:30:45Z", 76);
+    testFunction("dayOfQuarter(a)",  "2017-03-17T10:30:45Z", 76);
+    testFunction("hour(a)",   "2017-03-17T10:30:45Z", 10);
+    testFunction("minute(a)", "2017-03-17T10:30:45Z", 30);
+    testFunction("quarter(a)","2017-03-17T10:30:45Z", 1);
+    testFunction("week(a)",   "2017-03-17T10:30:45Z", 11);
+    testFunction("second(a)", "2017-03-17T10:30:45Z", 45);
+    testFunction("epoch(a)",  "2017-03-17T10:30:45Z", 1489746645000l);
+
+    testFunction("epoch(a)",  new Date(1489746645500l).toInstant().toString(), 1489746645500l);
+    testFunction("epoch(a)",  new Date(820454399990l).toInstant().toString(), 820454399990l);
+
+  }
+
+  @Test
+  public void testFunctionsOnDate() throws Exception {
+    Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"), Locale.ROOT);
+    calendar.set(2017, 12, 5, 23, 59);
+    Date aDate = calendar.getTime();
+    testFunction("year(a)", aDate, calendar.get(Calendar.YEAR));
+    testFunction("month(a)", aDate, calendar.get(Calendar.MONTH)+1);
+    testFunction("day(a)", aDate, calendar.get(Calendar.DAY_OF_MONTH));
+    testFunction("hour(a)", aDate, calendar.get(Calendar.HOUR_OF_DAY));
+    testFunction("minute(a)", aDate, calendar.get(Calendar.MINUTE));
+    testFunction("epoch(a)", aDate, aDate.getTime());
+  }
+
+  @Test
+  public void testFunctionsOnInstant() throws Exception {
+    Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"), Locale.ROOT);
+    calendar.set(2017, 12, 5, 23, 59);
+    Date aDate = calendar.getTime();
+    Instant instant = aDate.toInstant();
+    testFunction("year(a)", instant, calendar.get(Calendar.YEAR));
+    testFunction("month(a)", instant, calendar.get(Calendar.MONTH)+1);
+    testFunction("day(a)", instant, calendar.get(Calendar.DAY_OF_MONTH));
+    testFunction("hour(a)", instant, calendar.get(Calendar.HOUR_OF_DAY));
+    testFunction("minute(a)", instant, calendar.get(Calendar.MINUTE));
+    testFunction("epoch(a)", instant, aDate.getTime());
+  }
+
+  @Test
+  public void testFunctionsLocalDateTime() throws Exception {
+
+    LocalDateTime localDateTime = LocalDateTime.of(2017,12,5, 23, 59);
+    Date aDate = Date.from(localDateTime.atZone(ZoneOffset.UTC).toInstant());
+    testFunction("year(a)", localDateTime, 2017);
+    testFunction("month(a)", localDateTime, 12);
+    testFunction("day(a)", localDateTime, 5);
+    testFunction("hour(a)", localDateTime, 23);
+    testFunction("minute(a)", localDateTime, 59);
+    testFunction("epoch(a)", localDateTime, aDate.getTime());
+  }
+
+  @Test
+  public void testFunctionsOnLong() throws Exception {
+
+    Long longDate = 1512518340000l;
+
+    testFunction("year(a)", longDate, 2017);
+    testFunction("month(a)", longDate, 12);
+    testFunction("day(a)", longDate, 5);
+    testFunction("hour(a)", longDate, 23);
+    testFunction("minute(a)", longDate, 59);
+    testFunction("second(a)", longDate, 0);
+    testFunction("epoch(a)", longDate, longDate);
+
+  }
+
+  @Test
+  public void testLimitedFunctions() throws Exception {
+
+    MonthDay monthDay = MonthDay.of(12,5);
+    testFunction("month(a)", monthDay, 12);
+    testFunction("day(a)", monthDay, 5);
+
+    try {
+      testFunction("year(a)", monthDay, 2017);
+      assertTrue(false);
+    } catch (IOException e) {
+      assertEquals("It is not possible to call 'year' function on java.time.MonthDay", e.getMessage());
+    }
+
+    YearMonth yearMonth = YearMonth.of(2018, 4);
+    testFunction("month(a)", yearMonth, 4);
+    testFunction("year(a)", yearMonth, 2018);
+
+    try {
+      testFunction("day(a)", yearMonth, 5);
+      assertTrue(false);
+    } catch (IOException e) {
+      assertEquals("It is not possible to call 'day' function on java.time.YearMonth", e.getMessage());
+    }
+
+  }
+
+
+  public void testFunction(String expression, Object value, Number expected) throws Exception {
+    StreamEvaluator evaluator = factory.constructEvaluator(expression);
+    StreamContext streamContext = new StreamContext();
+    evaluator.setStreamContext(streamContext);
+    values.clear();
+    values.put("a", value);
+    Object result = evaluator.evaluate(new Tuple(values));
+    assertTrue(result instanceof Number);
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testExplain() throws IOException {
+    StreamExpression express = StreamExpressionParser.parse("month('myfield')");
+    TemporalEvaluatorMonth datePartEvaluator = new TemporalEvaluatorMonth(express,factory);
+    Explanation explain = datePartEvaluator.toExplanation(factory);
+    assertEquals("month(myfield)", explain.getExpression());
+
+    express = StreamExpressionParser.parse("day(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbb)");
+    TemporalEvaluatorDay dayPartEvaluator = new TemporalEvaluatorDay(express,factory);
+    explain = dayPartEvaluator.toExplanation(factory);
+    assertEquals("day(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbb)", explain.getExpression());
+  }
+}

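Condensing the pattern the tests above exercise, a sketch of extracting one date part from a tuple field, which may be an ISO_INSTANT string, Long epoch, Date, Instant, LocalDateTime or other TemporalAccessor (values per testAllFunctions; exception handling elided):

    StreamFactory factory = new StreamFactory()
        .withFunctionName(TemporalEvaluatorYear.FUNCTION_NAME, TemporalEvaluatorYear.class);
    StreamEvaluator eval = factory.constructEvaluator("year(a)");
    eval.setStreamContext(new StreamContext());
    Map<String, Object> values = new HashMap<>();
    values.put("a", "2017-03-17T10:30:45Z");        // ISO_INSTANT string
    Object year = eval.evaluate(new Tuple(values)); // 2017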
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index 0d4cedd..54ab06d 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -33,6 +33,7 @@ import java.lang.annotation.Target;
 import java.lang.invoke.MethodHandles;
 import java.lang.reflect.Method;
 import java.net.MalformedURLException;
+import java.net.ServerSocket;
 import java.net.URL;
 import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
@@ -802,6 +803,19 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
     configString = schemaString = null;
   }
 
+  /**
+   * Find next available local port.
+   * @return available port number or -1 if none could be found
+   * @throws Exception on IO errors
+   */
+  protected static int getNextAvailablePort() throws Exception {
+    int port = -1;
+    try (ServerSocket s = new ServerSocket(0)) {
+      port = s.getLocalPort();
+    }
+    return port;
+  }
+
 
   /** Validates an update XML String is successful
    */

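One caveat with the helper added above: the try-with-resources closes the ServerSocket before the port number is returned, so the OS may in principle hand the same port to another process before the caller binds it. That window is acceptable for tests, but this is not a general-purpose port reservation. A usage sketch (the system property wiring is illustrative only):

    int port = getNextAvailablePort();                       // ephemeral port picked by the OS
    System.setProperty("hostPort", Integer.toString(port));  // hypothetical wiring, not from the patch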
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
index ade1c69..48f7670 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
@@ -19,7 +19,6 @@ package org.apache.solr.cloud;
 import java.io.File;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.net.ServerSocket;
 import java.net.URI;
 import java.net.URL;
 import java.nio.file.Path;
@@ -598,14 +597,6 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
     return proxy;
   }
 
-  protected int getNextAvailablePort() throws Exception {
-    int port = -1;
-    try (ServerSocket s = new ServerSocket(0)) {
-      port = s.getLocalPort();
-    }
-    return port;
-  }
-
   private File getRelativeSolrHomePath(File solrHome) {
     final Path solrHomePath = solrHome.toPath();
     final Path curDirPath = new File("").getAbsoluteFile().toPath();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
index 15895d3..0605281 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
@@ -87,6 +87,11 @@ public class MiniSolrCloudCluster {
       "    <int name=\"distribUpdateConnTimeout\">${distribUpdateConnTimeout:45000}</int>\n" +
       "    <int name=\"distribUpdateSoTimeout\">${distribUpdateSoTimeout:340000}</int>\n" +
       "  </solrcloud>\n" +
+      "  <metrics>\n" +
+      "    <reporter name=\"default\" class=\"org.apache.solr.metrics.reporters.SolrJmxReporter\">\n" +
+      "      <str name=\"rootName\">solr_${hostPort:8983}</str>\n" +
+      "    </reporter>\n" +
+      "  </metrics>\n" +
       "  \n" +
       "</solr>\n";
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/test-framework/src/java/org/apache/solr/util/ReadOnlyCoresLocator.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/util/ReadOnlyCoresLocator.java b/solr/test-framework/src/java/org/apache/solr/util/ReadOnlyCoresLocator.java
index 3d11ff7..3ad3ce2 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/ReadOnlyCoresLocator.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/ReadOnlyCoresLocator.java
@@ -47,4 +47,10 @@ public abstract class ReadOnlyCoresLocator implements CoresLocator {
     // no-op
   }
 
+  @Override
+  public CoreDescriptor reload(CoreContainer cc, CoreDescriptor cd) {
+    return null; // no-op
+  }
+
+
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java b/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
index 2386681..b8e1899 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
@@ -187,16 +187,16 @@ public class TestHarness extends BaseTestHarness {
         .build();
     if (System.getProperty("zkHost") == null)
       cloudConfig = null;
-    UpdateShardHandlerConfig updateShardHandlerConfig
-        = new UpdateShardHandlerConfig(UpdateShardHandlerConfig.DEFAULT_MAXUPDATECONNECTIONS,
-                                       UpdateShardHandlerConfig.DEFAULT_MAXUPDATECONNECTIONSPERHOST,
-                                       30000, 30000,
-                                        UpdateShardHandlerConfig.DEFAULT_METRICNAMESTRATEGY);
+    UpdateShardHandlerConfig updateShardHandlerConfig = new UpdateShardHandlerConfig(
+        UpdateShardHandlerConfig.DEFAULT_MAXUPDATECONNECTIONS,
+        UpdateShardHandlerConfig.DEFAULT_MAXUPDATECONNECTIONSPERHOST,
+        30000, 30000,
+        UpdateShardHandlerConfig.DEFAULT_METRICNAMESTRATEGY, UpdateShardHandlerConfig.DEFAULT_MAXRECOVERYTHREADS);
     // universal default metric reporter
-    Map<String,String> attributes = new HashMap<>();
+    Map<String,Object> attributes = new HashMap<>();
     attributes.put("name", "default");
     attributes.put("class", SolrJmxReporter.class.getName());
-    PluginInfo defaultPlugin = new PluginInfo("reporter", attributes, null, null);
+    PluginInfo defaultPlugin = new PluginInfo("reporter", attributes);
 
     return new NodeConfig.NodeConfigBuilder("testNode", loader)
         .setUseSchemaCache(Boolean.getBoolean("shareSchema"))
@@ -222,13 +222,19 @@ public class TestHarness extends BaseTestHarness {
 
     @Override
     public List<CoreDescriptor> discover(CoreContainer cc) {
-      return ImmutableList.of(new CoreDescriptor(cc, coreName, cc.getCoreRootDirectory().resolve(coreName),
+      return ImmutableList.of(new CoreDescriptor(coreName, cc.getCoreRootDirectory().resolve(coreName),
+          cc.getContainerProperties(), cc.isZooKeeperAware(),
           CoreDescriptor.CORE_DATADIR, dataDir,
           CoreDescriptor.CORE_CONFIG, solrConfig,
           CoreDescriptor.CORE_SCHEMA, schema,
           CoreDescriptor.CORE_COLLECTION, System.getProperty("collection", "collection1"),
           CoreDescriptor.CORE_SHARD, System.getProperty("shard", "shard1")));
     }
+
+    @Override
+    public CoreDescriptor reload(CoreContainer cc, CoreDescriptor cd) {
+      return cd;
+    }
   }
   
   public CoreContainer getCoreContainer() {

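Both hunks above lean on the reworked CoreDescriptor constructor, which takes the container properties and ZooKeeper flag instead of the CoreContainer itself. A minimal sketch of the new shape (core name and extra properties are illustrative):

    CoreDescriptor cd = new CoreDescriptor("collection1",
        cc.getCoreRootDirectory().resolve("collection1"),
        cc.getContainerProperties(), cc.isZooKeeperAware(),
        CoreDescriptor.CORE_CONFIG, "solrconfig.xml");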
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/webapp/web/css/angular/plugins.css
----------------------------------------------------------------------
diff --git a/solr/webapp/web/css/angular/plugins.css b/solr/webapp/web/css/angular/plugins.css
index 0310e0e..03dc2ea 100644
--- a/solr/webapp/web/css/angular/plugins.css
+++ b/solr/webapp/web/css/angular/plugins.css
@@ -33,6 +33,8 @@ limitations under the License.
 #content #plugins #navigation .PLUGINCHANGES { margin-top: 20px; }
 #content #plugins #navigation .PLUGINCHANGES a { background-image: url( ../../img/ico/eye.png ); }
 #content #plugins #navigation .RELOAD a { background-image: url( ../../img/ico/arrow-circle.png ); }
+#content #plugins #navigation .NOTE { margin-top: 20px; }
+#content #plugins #navigation .NOTE p { color: #c0c0c0; font-style: italic; }
 
 
 #content #plugins #navigation a
@@ -125,14 +127,14 @@ limitations under the License.
 #content #plugins #frame .entry .stats span
 {
   float: left;
-  width: 11%;
+  width: 9%;
 }
 
 #content #plugins #frame .entry dd,
 #content #plugins #frame .entry .stats ul
 {
   float: right;
-  width: 88%;
+  width: 90%;
 }
 
 #content #plugins #frame .entry .stats ul
@@ -144,12 +146,12 @@ limitations under the License.
 
 #content #plugins #frame .entry .stats dt
 {
-  width: 27%;
+  width: 40%;
 }
 
 #content #plugins #frame .entry .stats dd
 {
-  width: 72%;
+  width: 59%;
 }
 
 #content #plugins #frame .entry.expanded a.linker {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/webapp/web/js/angular/controllers/documents.js
----------------------------------------------------------------------
diff --git a/solr/webapp/web/js/angular/controllers/documents.js b/solr/webapp/web/js/angular/controllers/documents.js
index be37c9f..d38265a 100644
--- a/solr/webapp/web/js/angular/controllers/documents.js
+++ b/solr/webapp/web/js/angular/controllers/documents.js
@@ -38,7 +38,6 @@ solrAdminApp.controller('DocumentsController',
             $scope.type = "json";
             $scope.commitWithin = 1000;
             $scope.overwrite = true;
-            $scope.boost = "1.0";
         };
 
         $scope.refresh();
@@ -78,7 +77,6 @@ solrAdminApp.controller('DocumentsController',
             }
 
             params.commitWithin = $scope.commitWithin;
-            params.boost = $scope.boost;
             params.overwrite = $scope.overwrite;
             params.core = $routeParams.core;
             params.wt = "json";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/webapp/web/js/scripts/documents.js
----------------------------------------------------------------------
diff --git a/solr/webapp/web/js/scripts/documents.js b/solr/webapp/web/js/scripts/documents.js
index 9d12e23..45cfbed 100644
--- a/solr/webapp/web/js/scripts/documents.js
+++ b/solr/webapp/web/js/scripts/documents.js
@@ -29,7 +29,6 @@ var content_generator = {
 
 //Utility function for turning on/off various elements
 function toggles(documents_form, show_json, show_file, show_doc, doc_text, show_wizard) {
-  var json_only = $('#json-only');
   var the_document = $('#document', documents_form);
   if (show_doc) {
     //console.log("doc: " + doc_text);
@@ -38,11 +37,6 @@ function toggles(documents_form, show_json, show_file, show_doc, doc_text, show_wizard) {
   } else {
     the_document.hide();
   }
-  if (show_json) {
-    json_only.show();
-  } else {
-    json_only.hide();
-  }
   var file_upload = $('#file-upload', documents_form);
   var upload_only = $('#upload-only', documents_form);
   if (show_file) {
@@ -233,7 +227,6 @@ sammy.get
                       .trigger('change');
                   var the_document = $('#document', documents_form).val();
                   var commit_within = $('#commitWithin', documents_form).val();
-                  var boost = $('#boost', documents_form).val();
                   var overwrite = $('#overwrite', documents_form).val();
                   var the_command = "";
                   var content_type = "";
@@ -245,7 +238,6 @@ sammy.get
                     //create a JSON command
                     the_command = "{"
                         + '"add":{ "doc":' + the_document + ","
-                        + '"boost":' + boost + ","
                         + '"overwrite":' + overwrite + ","
                         + '"commitWithin":' + commit_within
                         + "}}";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/webapp/web/partials/documents.html
----------------------------------------------------------------------
diff --git a/solr/webapp/web/partials/documents.html b/solr/webapp/web/partials/documents.html
index 74d034f..2bf3f12 100644
--- a/solr/webapp/web/partials/documents.html
+++ b/solr/webapp/web/partials/documents.html
@@ -88,13 +88,6 @@
             </label>
             <input ng-model="overwrite" type="text" id="overwrite" value="true" title="Overwrite">
           </div>
-          <!-- Boost is json only, since the XML has it embedded -->
-          <div id="json-only" ng-show="type=='json'">
-            <label for="boost">
-              <a rel="help">Boost</a>
-            </label>
-            <input ng-model="boost" type="text" id="boost" value="1.0" title="Document Boost">
-          </div>
         </div>
       </div>
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/webapp/web/partials/plugins.html
----------------------------------------------------------------------
diff --git a/solr/webapp/web/partials/plugins.html b/solr/webapp/web/partials/plugins.html
index d95fc9b..bd122a7 100644
--- a/solr/webapp/web/partials/plugins.html
+++ b/solr/webapp/web/partials/plugins.html
@@ -55,8 +55,8 @@ limitations under the License.
         </li>
         <li class="PLUGINCHANGES"><a ng-click="startRecording()">Watch Changes</a></li>
         <li class="RELOAD"><a ng-click="refresh()">Refresh Values</a></li>
+        <li class="NOTE"><p>NOTE: Only selected metrics are shown here. Full metrics can be accessed via /admin/metrics handler.</p></li>
     </ul>
-  
   </div>
 
   <div id="recording" ng-show="isRecording">

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/webapp/web/tpl/documents.html
----------------------------------------------------------------------
diff --git a/solr/webapp/web/tpl/documents.html b/solr/webapp/web/tpl/documents.html
index bd953a4..d2a2e0e 100644
--- a/solr/webapp/web/tpl/documents.html
+++ b/solr/webapp/web/tpl/documents.html
@@ -85,13 +85,6 @@
             </label>
             <input type="text" id="overwrite" value="true" title="Overwrite">
           </div>
-          <!-- Boost is json only, since the XML has it embedded -->
-          <div id="json-only">
-            <label for="boost">
-              <a rel="help">Boost</a>
-            </label>
-            <input type="text" id="boost" value="1.0" title="Document Boost">
-          </div>
         </div>
       </div>
 


[10/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
index 75f6c9b..2a4dcc0 100644
--- a/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
@@ -20,9 +20,9 @@ import java.nio.file.Path;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Random;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -30,11 +30,14 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.NoLockFactory;
+import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.cloud.hdfs.HdfsTestUtil;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.DirectoryFactory.DirContext;
 import org.apache.solr.handler.SnapShooter;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.store.hdfs.HdfsLocalityReporter;
 import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.apache.solr.util.MockCoreContainer.MockCoreDescriptor;
@@ -175,24 +178,24 @@ public class HdfsDirectoryFactoryTest extends SolrTestCaseJ4 {
   public void testLocalityReporter() throws Exception {
     Configuration conf = HdfsTestUtil.getClientConfiguration(dfsCluster);
     conf.set("dfs.permissions.enabled", "false");
-    
+
+    Random r = random();
     HdfsDirectoryFactory factory = new HdfsDirectoryFactory();
+    SolrMetricManager metricManager = new SolrMetricManager();
+    String registry = TestUtil.randomSimpleString(r, 2, 10);
+    String scope = TestUtil.randomSimpleString(r,2, 10);
     Map<String,String> props = new HashMap<String,String>();
     props.put(HdfsDirectoryFactory.HDFS_HOME, HdfsTestUtil.getURI(dfsCluster) + "/solr");
     props.put(HdfsDirectoryFactory.BLOCKCACHE_ENABLED, "false");
     props.put(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_ENABLE, "false");
     props.put(HdfsDirectoryFactory.LOCALITYMETRICS_ENABLED, "true");
     factory.init(new NamedList<>(props));
-    
-    Iterator<SolrInfoMBean> it = factory.offerMBeans().iterator();
-    it.next(); // skip
-    SolrInfoMBean localityBean = it.next(); // brittle, but it's ok
-    
-    // Make sure we have the right bean.
-    assertEquals("Got the wrong bean: " + localityBean.getName(), "hdfs-locality", localityBean.getName());
-    
+    factory.initializeMetrics(metricManager, registry, scope);
+
+    // get the metrics map for the locality bean
+    MetricsMap metrics = (MetricsMap)metricManager.registry(registry).getMetrics().get("OTHER." + scope + ".hdfsLocality");
     // We haven't done anything, so there should be no data
-    NamedList<?> statistics = localityBean.getStatistics();
+    Map<String,Object> statistics = metrics.getValue();
     assertEquals("Saw bytes that were not written: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL), 0l,
         statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL));
     assertEquals(
@@ -210,7 +213,7 @@ public class HdfsDirectoryFactoryTest extends SolrTestCaseJ4 {
     
     // no locality because hostname not set
     factory.setHost("bogus");
-    statistics = localityBean.getStatistics();
+    statistics = metrics.getValue();
     assertEquals("Wrong number of total bytes counted: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL),
         long_bytes, statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL));
     assertEquals("Wrong number of total blocks counted: " + statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_TOTAL),
@@ -221,7 +224,7 @@ public class HdfsDirectoryFactoryTest extends SolrTestCaseJ4 {
         
     // set hostname and check again
     factory.setHost("127.0.0.1");
-    statistics = localityBean.getStatistics();
+    statistics = metrics.getValue();
     assertEquals(
         "Did not count block as local after setting hostname: "
             + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_LOCAL),

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/MockInfoBean.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/MockInfoBean.java b/solr/core/src/test/org/apache/solr/core/MockInfoBean.java
new file mode 100644
index 0000000..dfa94ae
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/core/MockInfoBean.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.core;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import com.codahale.metrics.MetricRegistry;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
+
+class MockInfoBean implements SolrInfoBean, SolrMetricProducer {
+  Set<String> metricNames = new HashSet<>();
+  MetricRegistry registry;
+
+  @Override
+  public String getName() {
+    return "mock";
+  }
+
+  @Override
+  public Category getCategory() {
+    return Category.OTHER;
+  }
+
+  @Override
+  public String getDescription() {
+    return "mock";
+  }
+
+  @Override
+  public Set<String> getMetricNames() {
+    return metricNames;
+  }
+
+  @Override
+  public MetricRegistry getMetricRegistry() {
+    return registry;
+  }
+
+  @Override
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    registry = manager.registry(registryName);
+    MetricsMap metricsMap = new MetricsMap((detailed, map) -> {
+      map.put("Integer", 123);
+      map.put("Double",567.534);
+      map.put("Long", 32352463l);
+      map.put("Short", (short) 32768);
+      map.put("Byte", (byte) 254);
+      map.put("Float", 3.456f);
+      map.put("String","testing");
+      map.put("Object", new Object());
+    });
+    manager.registerGauge(this, registryName, metricsMap, true, getClass().getSimpleName(), getCategory().toString(), scope);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/MockInfoMBean.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/MockInfoMBean.java b/solr/core/src/test/org/apache/solr/core/MockInfoMBean.java
deleted file mode 100644
index e0d566c..0000000
--- a/solr/core/src/test/org/apache/solr/core/MockInfoMBean.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core;
-
-import java.net.URL;
-
-import org.apache.solr.common.util.NamedList;
-
-class MockInfoMBean implements SolrInfoMBean {
-  @Override
-  public String getName() {
-    return "mock";
-  }
-
-  @Override
-  public Category getCategory() {
-    return Category.OTHER;
-  }
-
-  @Override
-  public String getDescription() {
-    return "mock";
-  }
-
-  @Override
-  public URL[] getDocs() {
-    // TODO Auto-generated method stub
-    return null;
-  }
-
-  @Override
-  public String getVersion() {
-    return "mock";
-  }
-
-  @Override
-  public String getSource() {
-    return "mock";
-  }
-
-  @Override
-  @SuppressWarnings("unchecked")
-  public NamedList getStatistics() {
-    NamedList myList = new NamedList<Integer>();
-    myList.add("Integer", 123);
-    myList.add("Double",567.534);
-    myList.add("Long", 32352463l);
-    myList.add("Short", (short) 32768);
-    myList.add("Byte", (byte) 254);
-    myList.add("Float", 3.456f);
-    myList.add("String","testing");
-    myList.add("Object", new Object());
-    return myList;
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java b/solr/core/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java
index 367870a..bcf6e9f 100644
--- a/solr/core/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java
+++ b/solr/core/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java
@@ -17,6 +17,7 @@
 package org.apache.solr.core;
 
 import org.apache.solr.handler.RequestHandlerBase;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.common.util.NamedList;
@@ -41,6 +42,12 @@ public class MockQuerySenderListenerReqHandler extends RequestHandlerBase {
   }
 
   @Override
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    super.initializeMetrics(manager, registryName, scope);
+    manager.registerGauge(this, registryName, () -> initCounter.intValue(), true, "initCount", getCategory().toString(), scope);
+  }
+
+  @Override
   public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
     this.req = req;
     this.rsp = rsp;
@@ -51,12 +58,4 @@ public class MockQuerySenderListenerReqHandler extends RequestHandlerBase {
     String result = null;
     return result;
   }
-
-  @Override
-  public NamedList<Object> getStatistics() {
-    NamedList<Object> lst = super.getStatistics();
-    lst.add("initCount", initCounter.intValue());
-    return lst;
-  }
- 
 }

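The gauge registered above replaces the old getStatistics() NamedList: the handler hands the registry a live supplier, and callers read the value back by its dotted key, as RequestHandlersTest below does. A sketch of the read side (key shape taken from that test):

    Gauge<Number> g = (Gauge<Number>) manager.registry(registryName)
        .getMetrics().get("QUERY.mock.initCount");
    int initCount = g.getValue().intValue();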
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/RequestHandlersTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/RequestHandlersTest.java b/solr/core/src/test/org/apache/solr/core/RequestHandlersTest.java
index 9a953e1..3c13645 100644
--- a/solr/core/src/test/org/apache/solr/core/RequestHandlersTest.java
+++ b/solr/core/src/test/org/apache/solr/core/RequestHandlersTest.java
@@ -16,9 +16,13 @@
  */
 package org.apache.solr.core;
 
+import java.util.Map;
+
+import com.codahale.metrics.Gauge;
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.common.util.NamedList;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.request.SolrRequestHandler;
+import org.apache.solr.util.stats.MetricUtils;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -30,10 +34,11 @@ public class RequestHandlersTest extends SolrTestCaseJ4 {
 
   @Test
   public void testInitCount() {
-    SolrCore core = h.getCore();
-    SolrRequestHandler handler = core.getRequestHandler( "mock" );
+    String registry = h.getCore().getCoreMetricManager().getRegistryName();
+    SolrMetricManager manager = h.getCoreContainer().getMetricManager();
+    Gauge<Number> g = (Gauge<Number>)manager.registry(registry).getMetrics().get("QUERY.mock.initCount");
     assertEquals("Incorrect init count",
-                 1, handler.getStatistics().get("initCount"));
+                 1, g.getValue().intValue());
   }
 
   @Test
@@ -105,11 +110,11 @@ public class RequestHandlersTest extends SolrTestCaseJ4 {
         "text", "line up and fly directly at the enemy death cannons, clogging them with wreckage!"));
     assertU(commit());
 
-    NamedList updateStats = updateHandler.getStatistics();
-    NamedList termStats = termHandler.getStatistics();
+    Map<String,Object> updateStats = MetricUtils.convertMetrics(updateHandler.getMetricRegistry(), updateHandler.getMetricNames());
+    Map<String,Object> termStats = MetricUtils.convertMetrics(termHandler.getMetricRegistry(), termHandler.getMetricNames());
 
-    Double updateTime = (Double) updateStats.get("avgTimePerRequest");
-    Double termTime = (Double) termStats.get("avgTimePerRequest");
+    Long updateTime = (Long) updateStats.get("UPDATE./update.totalTime");
+    Long termTime = (Long) termStats.get("QUERY./terms.totalTime");
 
     assertFalse("RequestHandlers should not share statistics!", updateTime.equals(termTime));
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java b/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java
index 695e869..c042bd6 100644
--- a/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java
+++ b/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java
@@ -245,10 +245,10 @@ public class SolrCoreTest extends SolrTestCaseJ4 {
     //Test that SolrInfoMBeans are registered, including SearchComponents
     SolrCore core = h.getCore();
 
-    Map<String, SolrInfoMBean> infoRegistry = core.getInfoRegistry();
+    Map<String, SolrInfoBean> infoRegistry = core.getInfoRegistry();
     assertTrue("infoRegistry Size: " + infoRegistry.size() + " is not greater than: " + 0, infoRegistry.size() > 0);
     //try out some that we know are in the config
-    SolrInfoMBean bean = infoRegistry.get(SpellCheckComponent.COMPONENT_NAME);
+    SolrInfoBean bean = infoRegistry.get(SpellCheckComponent.COMPONENT_NAME);
     assertNotNull("bean not registered", bean);
     //try a default one
     bean = infoRegistry.get(QueryComponent.COMPONENT_NAME);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java b/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java
index 7d2f174..bdef535 100644
--- a/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java
+++ b/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java
@@ -215,11 +215,15 @@ public class TestCodecSupport extends SolrTestCaseJ4 {
     assertNull("Unexpected configuration of codec factory for this test. Expecting empty element", 
         config.getNode("codecFactory", false).getFirstChild());
     IndexSchema schema = IndexSchemaFactory.buildIndexSchema("schema_codec.xml", config);
+
+    CoreContainer coreContainer = h.getCoreContainer();
     
     try {
-      c = new SolrCore(new CoreDescriptor(h.getCoreContainer(), newCoreName, testSolrHome.resolve(newCoreName)), 
+      CoreDescriptor cd = new CoreDescriptor(newCoreName, testSolrHome.resolve(newCoreName),
+          coreContainer.getContainerProperties(), coreContainer.isZooKeeperAware());
+      c = new SolrCore(coreContainer, cd,
           new ConfigSet("fakeConfigset", config, schema, null, true));
-      assertNull(h.getCoreContainer().registerCore(newCoreName, c, false, false));
+      assertNull(coreContainer.registerCore(cd, c, false, false));
       h.coreName = newCoreName;
       assertEquals("We are not using the correct core", "solrconfig_codec2.xml", h.getCore().getConfigResource());
       assertU(add(doc("string_f", "foo")));
@@ -227,7 +231,7 @@ public class TestCodecSupport extends SolrTestCaseJ4 {
       assertCompressionMode(SchemaCodecFactory.SOLR_DEFAULT_COMPRESSION_MODE.name(), h.getCore());
     } finally {
       h.coreName = previousCoreName;
-      h.getCoreContainer().unload(newCoreName);
+      coreContainer.unload(newCoreName);
     }
     
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
index d23b8b1..91bbabb 100644
--- a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
+++ b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
@@ -214,7 +214,9 @@ public class TestCoreContainer extends SolrTestCaseJ4 {
 
     final CoreContainer cc = new CoreContainer(SolrXmlConfig.fromString(resourceLoader, CONFIGSETS_SOLR_XML), new Properties(), cl);
     Path corePath = resourceLoader.getInstancePath().resolve("badcore");
-    CoreDescriptor badcore = new CoreDescriptor(cc, "badcore", corePath, "configSet", "nosuchconfigset");
+    CoreDescriptor badcore = new CoreDescriptor("badcore", corePath, cc.getContainerProperties(), cc.isZooKeeperAware(),
+        "configSet", "nosuchconfigset");
+
     cl.add(badcore);
 
     try {
@@ -383,6 +385,12 @@ public class TestCoreContainer extends SolrTestCaseJ4 {
     public List<CoreDescriptor> discover(CoreContainer cc) {
       return cores;
     }
+
+    @Override
+    public CoreDescriptor reload(CoreContainer cc, CoreDescriptor cd) {
+      return cd;
+    }
+
   }
 
   @Test
@@ -397,7 +405,7 @@ public class TestCoreContainer extends SolrTestCaseJ4 {
     CoreContainer cc = init(CONFIGSETS_SOLR_XML);
 
     // check that we have the cores we expect
-    cores = cc.getCoreNames();
+    cores = cc.getLoadedCoreNames();
     assertNotNull("core names is null", cores);
     assertEquals("wrong number of cores", 0, cores.size());
 
@@ -420,7 +428,7 @@ public class TestCoreContainer extends SolrTestCaseJ4 {
     }
 
     // check that we have the cores we expect
-    cores = cc.getCoreNames();
+    cores = cc.getLoadedCoreNames();
     assertNotNull("core names is null", cores);
     assertEquals("wrong number of cores", 0, cores.size());
 
@@ -467,12 +475,14 @@ public class TestCoreContainer extends SolrTestCaseJ4 {
     System.setProperty("configsets", getFile("solr/configsets").getAbsolutePath());
 
     final CoreContainer cc = new CoreContainer(SolrXmlConfig.fromString(resourceLoader, CONFIGSETS_SOLR_XML), new Properties(), cl);
-    cl.add(new CoreDescriptor(cc, "col_ok", resourceLoader.getInstancePath().resolve("col_ok"), "configSet", "minimal"));
-    cl.add(new CoreDescriptor(cc, "col_bad", resourceLoader.getInstancePath().resolve("col_bad"), "configSet", "bad-mergepolicy"));
+    cl.add(new CoreDescriptor("col_ok", resourceLoader.getInstancePath().resolve("col_ok"),
+        cc.getContainerProperties(), cc.isZooKeeperAware(), "configSet", "minimal"));
+    cl.add(new CoreDescriptor("col_bad", resourceLoader.getInstancePath().resolve("col_bad"),
+        cc.getContainerProperties(), cc.isZooKeeperAware(), "configSet", "bad-mergepolicy"));
     cc.load();
 
     // check that we have the cores we expect
-    cores = cc.getCoreNames();
+    cores = cc.getLoadedCoreNames();
     assertNotNull("core names is null", cores);
     assertEquals("wrong number of cores", 1, cores.size());
     assertTrue("col_ok not found", cores.contains("col_ok"));
@@ -509,7 +519,7 @@ public class TestCoreContainer extends SolrTestCaseJ4 {
     cc.create("col_bad", ImmutableMap.of());
 
     // check that we have the cores we expect
-    cores = cc.getCoreNames();
+    cores = cc.getLoadedCoreNames();
     assertNotNull("core names is null", cores);
     assertEquals("wrong number of cores", 2, cores.size());
     assertTrue("col_ok not found", cores.contains("col_ok"));
@@ -534,7 +544,7 @@ public class TestCoreContainer extends SolrTestCaseJ4 {
     }
 
     // check that we have the cores we expect
-    cores = cc.getCoreNames();
+    cores = cc.getLoadedCoreNames();
     assertNotNull("core names is null", cores);
     assertEquals("wrong number of cores", 2, cores.size());
     assertTrue("col_ok not found", cores.contains("col_ok"));
@@ -591,7 +601,7 @@ public class TestCoreContainer extends SolrTestCaseJ4 {
         col_bad_old_start, getCoreStartTime(cc, "col_bad"));
 
     // check that we have the cores we expect
-    cores = cc.getCoreNames();
+    cores = cc.getLoadedCoreNames();
     assertNotNull("core names is null", cores);
     assertEquals("wrong number of cores", 2, cores.size());
     assertTrue("col_ok not found", cores.contains("col_ok"));
@@ -619,7 +629,7 @@ public class TestCoreContainer extends SolrTestCaseJ4 {
 
 
     // check that we have the cores we expect
-    cores = cc.getCoreNames();
+    cores = cc.getLoadedCoreNames();
     assertNotNull("core names is null", cores);
     assertEquals("wrong number of cores", 2, cores.size());
     assertTrue("col_ok not found", cores.contains("col_ok"));

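The mock locator above now has to implement the new CoresLocator.reload(CoreContainer, CoreDescriptor) hook. A minimal read-only locator can simply hand the descriptor back; a sketch, assuming the remaining locator methods (create, persist, delete, rename, swap) may be left as no-ops:

    class StaticCoresLocator implements CoresLocator {
      final List<CoreDescriptor> cores = new ArrayList<>();
      @Override public List<CoreDescriptor> discover(CoreContainer cc) { return cores; }
      @Override public CoreDescriptor reload(CoreContainer cc, CoreDescriptor cd) { return cd; } // no on-disk state to re-read
      @Override public void create(CoreContainer cc, CoreDescriptor... cds) {}
      @Override public void persist(CoreContainer cc, CoreDescriptor... cds) {}
      @Override public void delete(CoreContainer cc, CoreDescriptor... cds) {}
      @Override public void rename(CoreContainer cc, CoreDescriptor oldCD, CoreDescriptor newCD) {}
      @Override public void swap(CoreContainer cc, CoreDescriptor cd1, CoreDescriptor cd2) {}
    }
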
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
index 22020ba..0c05d83 100644
--- a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
+++ b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
@@ -200,7 +200,7 @@ public class TestCoreDiscovery extends SolrTestCaseJ4 {
       cc.load();
       // Just check that the proper number of cores are loaded since making the test depend on order would be fragile
       assertEquals("There should only be 3 cores loaded, coreLOS and two coreT? cores",
-          3, cc.getCoreNames().size());
+          3, cc.getLoadedCoreNames().size());
 
       SolrCore c1 = cc.getCore("coreT1");
       assertNotNull("Core T1 should NOT BE NULL", c1);

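Note the rename used here and below: getCoreNames() becomes getLoadedCoreNames(), which reports only cores that are actually loaded, so lazily-loaded or transient cores that have not been created yet do not inflate the count. A sketch of the intended reading, per the assertions in these tests:

    Collection<String> loaded = cc.getLoadedCoreNames();
    // an unloaded transient core is absent from this collection
    assertEquals(3, loaded.size());
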
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java b/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java
index f841f92..db941f7 100644
--- a/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java
+++ b/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java
@@ -16,7 +16,10 @@
  */
 package org.apache.solr.core;
 
-import org.apache.solr.core.JmxMonitoredMap.SolrDynamicMBean;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricReporter;
+import org.apache.solr.metrics.reporters.JmxObjectNameFactory;
+import org.apache.solr.metrics.reporters.SolrJmxReporter;
 import org.apache.solr.util.AbstractSolrTestCase;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -29,12 +32,10 @@ import javax.management.AttributeNotFoundException;
 import javax.management.MBeanAttributeInfo;
 import javax.management.MBeanInfo;
 import javax.management.MBeanServer;
-import javax.management.MalformedObjectNameException;
 import javax.management.ObjectInstance;
 import javax.management.ObjectName;
 import java.lang.invoke.MethodHandles;
 import java.lang.management.ManagementFactory;
-import java.util.Hashtable;
 import java.util.Map;
 import java.util.Set;
 
@@ -49,6 +50,8 @@ public class TestJmxIntegration extends AbstractSolrTestCase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private static MBeanServer mbeanServer = null;
+  private static JmxObjectNameFactory nameFactory = null;
+  private static String registryName = null;
 
   @BeforeClass
   public static void beforeClass() throws Exception {
@@ -61,25 +64,30 @@ public class TestJmxIntegration extends AbstractSolrTestCase {
 
     initCore("solrconfig.xml", "schema.xml");
 
-    // we should be able to se that the core has JmxIntegration enabled
-    assertTrue("JMX not enabled",
-               h.getCore().getSolrConfig().jmxConfig.enabled);
-    // and we should be able to see that the the monitor map found 
-    // a JMX server to use, which refers to the server we started
-
-    Map registry = h.getCore().getInfoRegistry();
-    assertTrue("info registry is not a JMX monitored map",
-               registry instanceof JmxMonitoredMap);
-    mbeanServer = ((JmxMonitoredMap)registry).getServer();
-
-    assertNotNull("No JMX server found by monitor map",
-                  mbeanServer);
-
-    // NOTE: we can't garuntee that "mbeanServer == platformServer"
-    // the JVM may have mutiple MBean servers funning when the test started
-    // and the contract of not specifying one when configuring solr with
-    // <jmx /> is that it will use whatever the "first" MBean server 
+    // we should be able to see that the core has JmxIntegration enabled
+    registryName = h.getCore().getCoreMetricManager().getRegistryName();
+    SolrMetricManager manager = h.getCoreContainer().getMetricManager();
+    Map<String,SolrMetricReporter> reporters = manager.getReporters(registryName);
+    assertEquals(1, reporters.size());
+    SolrMetricReporter reporter = reporters.values().iterator().next();
+    assertTrue(reporter instanceof SolrJmxReporter);
+    SolrJmxReporter jmx = (SolrJmxReporter)reporter;
+    assertTrue("JMX not enabled", jmx.isActive());
+    // and we should be able to see that the reporter
+    // refers to the JMX server we started
+
+    mbeanServer = jmx.getMBeanServer();
+
+    assertNotNull("No JMX server found in the reporter",
+        mbeanServer);
+
+    // NOTE: we can't guarantee that "mbeanServer == platformServer"
+    // the JVM may have multiple MBean servers running when the test started
+    // and the contract of not specifying one when configuring solr.xml without
+    // agentId or serviceUrl is that it will use whatever the "first" MBean server
     // returned by the JVM is.
+
+    nameFactory = new JmxObjectNameFactory("default", registryName);
   }
 
   @AfterClass
@@ -93,34 +101,38 @@ public class TestJmxIntegration extends AbstractSolrTestCase {
 
     Set<ObjectInstance> objects = mbeanServer.queryMBeans(null, null);
     assertFalse("No objects found in mbean server", objects
-            .isEmpty());
+        .isEmpty());
     int numDynamicMbeans = 0;
     for (ObjectInstance o : objects) {
-      assertNotNull("Null name on: " + o.toString(), o.getObjectName());
-      MBeanInfo mbeanInfo = mbeanServer.getMBeanInfo(o.getObjectName());
-      if (mbeanInfo.getClassName().endsWith(SolrDynamicMBean.class.getName())) {
+      ObjectName name = o.getObjectName();
+      assertNotNull("Null name on: " + o.toString(), name);
+      MBeanInfo mbeanInfo = mbeanServer.getMBeanInfo(name);
+      if (name.getDomain().equals("solr")) {
         numDynamicMbeans++;
         MBeanAttributeInfo[] attrs = mbeanInfo.getAttributes();
-        assertTrue("No Attributes found for mbean: " + mbeanInfo, 
-                   0 < attrs.length);
+        if (name.getKeyProperty("name").equals("fetcher")) { // no attributes without active replication
+          continue;
+        }
+        assertTrue("No Attributes found for mbean: " + o.getObjectName() + ", " + mbeanInfo,
+            0 < attrs.length);
         for (MBeanAttributeInfo attr : attrs) {
           // ensure every advertised attribute is gettable
           try {
             Object trash = mbeanServer.getAttribute(o.getObjectName(), attr.getName());
           } catch (javax.management.AttributeNotFoundException e) {
             throw new RuntimeException("Unable to featch attribute for " + o.getObjectName()
-                                       + ": " + attr.getName(), e);
+                + ": " + attr.getName(), e);
           }
         }
       }
     }
-    assertTrue("No SolrDynamicMBeans found", 0 < numDynamicMbeans);
+    assertTrue("No MBeans found", 0 < numDynamicMbeans);
   }
 
   @Test
   public void testJmxUpdate() throws Exception {
 
-    SolrInfoMBean bean = null;
+    SolrInfoBean bean = null;
     // wait until searcher is registered
     for (int i=0; i<100; i++) {
       bean = h.getCore().getInfoRegistry().get("searcher");
@@ -128,18 +140,20 @@ public class TestJmxIntegration extends AbstractSolrTestCase {
       Thread.sleep(250);
     }
     if (bean==null) throw new RuntimeException("searcher was never registered");
-    ObjectName searcher = getObjectName("searcher", bean);
+    ObjectName searcher = nameFactory.createName("gauge", registryName, "SEARCHER.searcher.*");
 
     log.info("Mbeans in server: " + mbeanServer.queryNames(null, null));
 
+    Set<ObjectInstance> objects = mbeanServer.queryMBeans(searcher, null);
     assertFalse("No mbean found for SolrIndexSearcher", mbeanServer.queryMBeans(searcher, null).isEmpty());
 
-    int oldNumDocs =  (Integer)mbeanServer.getAttribute(searcher, "numDocs");
+    ObjectName name = nameFactory.createName("gauge", registryName, "SEARCHER.searcher.numDocs");
+    int oldNumDocs =  (Integer)mbeanServer.getAttribute(name, "Value");
     assertU(adoc("id", "1"));
     assertU("commit", commit());
-    int numDocs = (Integer)mbeanServer.getAttribute(searcher, "numDocs");
+    int numDocs = (Integer)mbeanServer.getAttribute(name, "Value");
     assertTrue("New numDocs is same as old numDocs as reported by JMX",
-            numDocs > oldNumDocs);
+        numDocs > oldNumDocs);
   }
 
   @Test @Ignore("timing problem? https://issues.apache.org/jira/browse/SOLR-2715")
@@ -183,14 +197,4 @@ public class TestJmxIntegration extends AbstractSolrTestCase {
     log.info("After Reload: Size of infoRegistry: " + registrySize + " MBeans: " + newNumberOfObjects);
     assertEquals("Number of registered MBeans is not the same as info registry size", registrySize, newNumberOfObjects);
   }
-
-  private ObjectName getObjectName(String key, SolrInfoMBean infoBean)
-          throws MalformedObjectNameException {
-    Hashtable<String, String> map = new Hashtable<>();
-    map.put("type", key);
-    map.put("id", infoBean.getName());
-    String coreName = h.getCore().getName();
-    return ObjectName.getInstance(("solr" + (null != coreName ? "/" + coreName : "")), map);
-  }
-}
-
+}
\ No newline at end of file

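Condensed, the JmxObjectNameFactory pattern this test now uses looks like the following sketch; it assumes the SolrJmxReporter publishes to the platform MBean server, which is the behavior described above when neither agentId nor serviceUrl is configured:

    String registry = h.getCore().getCoreMetricManager().getRegistryName();
    JmxObjectNameFactory names = new JmxObjectNameFactory("default", registry);
    ObjectName numDocs = names.createName("gauge", registry, "SEARCHER.searcher.numDocs");
    // metric values are exposed through the "Value" attribute
    Object value = ManagementFactory.getPlatformMBeanServer().getAttribute(numDocs, "Value");
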
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java b/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java
deleted file mode 100644
index aa107bc..0000000
--- a/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core;
-
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.SolrConfig.JmxConfiguration;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.management.MBeanServerConnection;
-import javax.management.ObjectInstance;
-import javax.management.ObjectName;
-import javax.management.Query;
-import javax.management.remote.JMXConnector;
-import javax.management.remote.JMXConnectorFactory;
-import javax.management.remote.JMXServiceURL;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.net.ServerSocket;
-import java.rmi.registry.LocateRegistry;
-import java.rmi.server.RMIServerSocketFactory;
-import java.util.Set;
-
-import static org.hamcrest.CoreMatchers.allOf;
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.instanceOf;
-
-/**
- * Test for JmxMonitoredMap
- *
- *
- * @since solr 1.3
- */
-public class TestJmxMonitoredMap extends LuceneTestCase {
-
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-  private int port = 0;
-
-  private JMXConnector connector;
-
-  private MBeanServerConnection mbeanServer;
-
-  private JmxMonitoredMap<String, SolrInfoMBean> monitoredMap;
-
-  @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
-    String oldHost = System.getProperty("java.rmi.server.hostname");
-    try {
-      // this stupid sysprop thing is needed, because remote stubs use an
-      // arbitrary local ip to connect
-      // See: http://weblogs.java.net/blog/emcmanus/archive/2006/12/multihomed_comp.html
-      System.setProperty("java.rmi.server.hostname", "127.0.0.1");
-      class LocalhostRMIServerSocketFactory implements RMIServerSocketFactory {
-        ServerSocket socket;
-        
-        @Override
-        public ServerSocket createServerSocket(int port) throws IOException {
-          return socket = new ServerSocket(port);
-        }
-      };
-      LocalhostRMIServerSocketFactory factory = new LocalhostRMIServerSocketFactory();
-      LocateRegistry.createRegistry(0, null, factory);
-      port = factory.socket.getLocalPort();
-      log.info("Using port: " + port);
-      String url = "service:jmx:rmi:///jndi/rmi://127.0.0.1:"+port+"/solrjmx";
-      JmxConfiguration config = new JmxConfiguration(true, null, url, null);
-      monitoredMap = new JmxMonitoredMap<>("", "", "", config);
-      JMXServiceURL u = new JMXServiceURL(url);
-      connector = JMXConnectorFactory.connect(u);
-      mbeanServer = connector.getMBeanServerConnection();
-    } finally {
-      if (oldHost == null) {
-        System.clearProperty("java.rmi.server.hostname");
-      } else {
-        System.setProperty("java.rmi.server.hostname", oldHost);
-      }
-    }
-  }
-
-  @Override
-  @After
-  public void tearDown() throws Exception {
-    try {
-      connector.close();
-    } catch (Exception e) {
-    }
-    super.tearDown();
-  }
-
-  @Test
-  public void testTypeName() throws Exception{
-    MockInfoMBean mock = new MockInfoMBean();
-    monitoredMap.put("mock", mock);
-
-    NamedList dynamicStats = mock.getStatistics();
-    assertTrue(dynamicStats.size() != 0);
-    assertTrue(dynamicStats.get("Integer") instanceof Integer);
-    assertTrue(dynamicStats.get("Double") instanceof Double);
-    assertTrue(dynamicStats.get("Long") instanceof Long);
-    assertTrue(dynamicStats.get("Short") instanceof Short);
-    assertTrue(dynamicStats.get("Byte") instanceof Byte);
-    assertTrue(dynamicStats.get("Float") instanceof Float);
-    assertTrue(dynamicStats.get("String") instanceof String);
-
-    Set<ObjectInstance> objects = mbeanServer.queryMBeans(null, Query.match(
-        Query.attr("name"), Query.value("mock")));
-
-    ObjectName name = objects.iterator().next().getObjectName();
-    assertMBeanTypeAndValue(name, "Integer", Integer.class, 123);
-    assertMBeanTypeAndValue(name, "Double", Double.class, 567.534);
-    assertMBeanTypeAndValue(name, "Long", Long.class, 32352463l);
-    assertMBeanTypeAndValue(name, "Short", Short.class, (short) 32768);
-    assertMBeanTypeAndValue(name, "Byte", Byte.class, (byte) 254);
-    assertMBeanTypeAndValue(name, "Float", Float.class, 3.456f);
-    assertMBeanTypeAndValue(name, "String",String.class, "testing");
-
-  }
-
-  @SuppressWarnings("unchecked")
-  public void assertMBeanTypeAndValue(ObjectName name, String attr, Class type, Object value) throws Exception {
-    assertThat(mbeanServer.getAttribute(name, attr), 
-        allOf(instanceOf(type), equalTo(value))
-    );
-  }
-
-  @Test
-  public void testPutRemoveClear() throws Exception {
-    MockInfoMBean mock = new MockInfoMBean();
-    monitoredMap.put("mock", mock);
-
-
-    Set<ObjectInstance> objects = mbeanServer.queryMBeans(null, Query.match(
-        Query.attr("name"), Query.value("mock")));
-    assertFalse("No MBean for mock object found in MBeanServer", objects
-        .isEmpty());
-
-    monitoredMap.remove("mock");
-    objects = mbeanServer.queryMBeans(null, Query.match(Query.attr("name"),
-        Query.value("mock")));
-    assertTrue("MBean for mock object found in MBeanServer even after removal",
-        objects.isEmpty());
-
-    monitoredMap.put("mock", mock);
-    monitoredMap.put("mock2", mock);
-    objects = mbeanServer.queryMBeans(null, Query.match(Query.attr("name"),
-        Query.value("mock")));
-    assertFalse("No MBean for mock object found in MBeanServer", objects
-        .isEmpty());
-
-    monitoredMap.clear();
-    objects = mbeanServer.queryMBeans(null, Query.match(Query.attr("name"),
-        Query.value("mock")));
-    assertTrue(
-        "MBean for mock object found in MBeanServer even after clear has been called",
-        objects.isEmpty());
-
-  }
-
-  @Test
-  public void testJmxAugmentedSolrInfoMBean() throws Exception {
-    final MockInfoMBean mock = new MockInfoMBean();
-    final String jmxKey = "jmx";
-    final String jmxValue = "jmxValue";
-
-    MockJmxAugmentedSolrInfoMBean mbean = new MockJmxAugmentedSolrInfoMBean(mock) {
-      @Override
-      public NamedList getStatisticsForJmx() {
-        NamedList stats = getStatistics();
-        stats.add(jmxKey, jmxValue);
-        return stats;
-      }
-    };
-    monitoredMap.put("mock", mbean);
-
-    // assert getStatistics called when used as a map.  Note can't use equals here to compare
-    // because getStatistics returns a new Object each time.
-    assertNull(monitoredMap.get("mock").getStatistics().get(jmxKey));
-
-    //  assert getStatisticsForJmx called when used as jmx server
-    Set<ObjectInstance> objects = mbeanServer.queryMBeans(null, Query.match(
-        Query.attr("name"), Query.value("mock")));
-    ObjectName name = objects.iterator().next().getObjectName();
-    assertMBeanTypeAndValue(name, jmxKey, jmxValue.getClass(), jmxValue);
-  }
-
-  private static abstract class MockJmxAugmentedSolrInfoMBean
-      extends SolrInfoMBeanWrapper implements JmxMonitoredMap.JmxAugmentedSolrInfoMBean {
-
-    public MockJmxAugmentedSolrInfoMBean(SolrInfoMBean mbean) {
-      super(mbean);
-    }
-
-    @Override
-    public abstract NamedList getStatisticsForJmx();
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
index 8690e27..411a2bb 100644
--- a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
+++ b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
@@ -58,7 +58,7 @@ public class TestLazyCores extends SolrTestCaseJ4 {
   }
 
   private static CoreDescriptor makeCoreDescriptor(CoreContainer cc, String coreName, String isTransient, String loadOnStartup) {
-    return new CoreDescriptor(cc, coreName, cc.getCoreRootDirectory().resolve(coreName),
+    return new CoreDescriptor(coreName, cc.getCoreRootDirectory().resolve(coreName), cc.getContainerProperties(), false,
         CoreDescriptor.CORE_TRANSIENT, isTransient,
         CoreDescriptor.CORE_LOADONSTARTUP, loadOnStartup);
   }
@@ -372,8 +372,7 @@ public class TestLazyCores extends SolrTestCaseJ4 {
             resp);
 
   }
-
-
+  
   // Make sure that creating a transient core from the admin handler correctly respects the transient limits etc.
   @Test
   public void testCreateTransientFromAdmin() throws Exception {
@@ -496,7 +495,13 @@ public class TestLazyCores extends SolrTestCaseJ4 {
       copyGoodConf("badSchema2", "schema-tiny.xml", "schema.xml");
 
       
-      // This should force a reload of the cores.
+      // Reload the cores and ensure that
+      // 1> they pick up the new configs
+      // 2> they don't fail again because of stale loadFailure entries in the core container.
+      cc.reload("badConfig1");
+      cc.reload("badConfig2");
+      cc.reload("badSchema1");
+      cc.reload("badSchema2");
       SolrCore bc1 = cc.getCore("badConfig1");;
       SolrCore bc2 = cc.getCore("badConfig2");
       SolrCore bs1 = cc.getCore("badSchema1");
@@ -640,7 +645,7 @@ public class TestLazyCores extends SolrTestCaseJ4 {
   }
 
   public static void checkNotInCores(CoreContainer cc, String... nameCheck) {
-    Collection<String> loadedNames = cc.getCoreNames();
+    Collection<String> loadedNames = cc.getLoadedCoreNames();
     for (String name : nameCheck) {
       assertFalse("core " + name + " was found in the list of cores", loadedNames.contains(name));
     }
@@ -673,8 +678,8 @@ public class TestLazyCores extends SolrTestCaseJ4 {
   }
 
   public static void checkInCores(CoreContainer cc, String... nameCheck) {
-    Collection<String> loadedNames = cc.getCoreNames();
-    
+    Collection<String> loadedNames = cc.getLoadedCoreNames();
+
     assertEquals("There whould be exactly as many loaded cores as loaded names returned. ", 
         loadedNames.size(), nameCheck.length);
     

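The reload calls added in this test are the recovery path for cores whose initial load failed: once the configuration on disk has been repaired, an explicit reload drops the stale failure entry so getCore() can succeed. Roughly (file names illustrative):

    copyGoodConf("badConfig1", "solrconfig-minimal.xml", "solrconfig.xml"); // repair on disk
    cc.reload("badConfig1");                       // clears the loadFailure entry
    try (SolrCore fixed = cc.getCore("badConfig1")) {
      assertNotNull(fixed);                        // loads cleanly now
    }
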
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/core/TestSolrDynamicMBean.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrDynamicMBean.java b/solr/core/src/test/org/apache/solr/core/TestSolrDynamicMBean.java
deleted file mode 100644
index eae4e79..0000000
--- a/solr/core/src/test/org/apache/solr/core/TestSolrDynamicMBean.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core;
-
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.solr.core.JmxMonitoredMap.SolrDynamicMBean;
-import org.junit.Before;
-import org.junit.Test;
-
-/**
- * Test for JmxMonitoredMap
- *
- *
- * @since solr 1.3
- */
-public class TestSolrDynamicMBean extends LuceneTestCase {
-
-
-  @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
-  }
-
-
-  @Test
-  public void testCachedStatsOption() throws Exception{
-    //  SOLR-6747 Add an optional caching option as a workaround for SOLR-6586.
-    
-    SolrInfoMBean solrmbeaninfo = new MockInfoMBean();
-    SolrDynamicMBean sdmbean = new SolrDynamicMBean("", solrmbeaninfo);
-    
-    sdmbean.getMBeanInfo();
-    
-    Object object1 = sdmbean.getAttribute("Object");
-    Object object2 = sdmbean.getAttribute("Object");
-    
-    assertNotSame(object1, object2);
-    
-    sdmbean.getMBeanInfo();
-    
-    Object object12 = sdmbean.getAttribute("Object");
-    Object object22 = sdmbean.getAttribute("Object");
-    
-    assertNotSame(object1, object12);
-    assertNotSame(object2, object22);
-    
-    
-    // test cached stats
-    
-    solrmbeaninfo = new MockInfoMBean();
-    sdmbean = new SolrDynamicMBean("", solrmbeaninfo, true);
-    
-    sdmbean.getMBeanInfo();
-    
-    object1 = sdmbean.getAttribute("Object");
-    object2 = sdmbean.getAttribute("Object");
-    
-    assertEquals(object1, object2);
-    
-    sdmbean.getMBeanInfo();
-    
-    object12 = sdmbean.getAttribute("Object");
-    object22 = sdmbean.getAttribute("Object");
-    
-    assertNotSame(object1, object12);
-    assertNotSame(object2, object22);
-    
-    assertEquals(object12, object22);
-    
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java
index 84e2382..c7622f6 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java
@@ -31,7 +31,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class MBeansHandlerTest extends SolrTestCaseJ4 {
-  
+
   @BeforeClass
   public static void beforeClass() throws Exception {
     initCore("solrconfig.xml", "schema.xml");
@@ -43,26 +43,26 @@ public class MBeansHandlerTest extends SolrTestCaseJ4 {
         CommonParams.QT,"/admin/mbeans",
         "stats","true",
         CommonParams.WT,"xml"
-     ));
+    ));
     List<ContentStream> streams = new ArrayList<>();
     streams.add(new ContentStreamBase.StringStream(xml));
-    
+
     LocalSolrQueryRequest req = lrf.makeRequest(
         CommonParams.QT,"/admin/mbeans",
         "stats","true",
         CommonParams.WT,"xml",
         "diff","true");
     req.setContentStreams(streams);
-    
+
     xml = h.query(req);
     NamedList<NamedList<NamedList<Object>>> diff = SolrInfoMBeanHandler.fromXML(xml);
 
     // The stats bean for SolrInfoMBeanHandler
     NamedList stats = (NamedList)diff.get("ADMIN").get("/admin/mbeans").get("stats");
-    
+
     //System.out.println("stats:"+stats);
     Pattern p = Pattern.compile("Was: (?<was>[0-9]+), Now: (?<now>[0-9]+), Delta: (?<delta>[0-9]+)");
-    String response = stats.get("requests").toString();
+    String response = stats.get("ADMIN./admin/mbeans.requests").toString();
     Matcher m = p.matcher(response);
     if (!m.matches()) {
       fail("Response did not match pattern: " + response);
@@ -96,4 +96,4 @@ public class MBeansHandlerTest extends SolrTestCaseJ4 {
 
     assertTrue("external entity ignored properly", true);
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
index 2f84997..eb86b1b 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
@@ -33,7 +33,11 @@ import org.junit.Test;
 public class MetricsHandlerTest extends SolrTestCaseJ4 {
   @BeforeClass
   public static void beforeClass() throws Exception {
+
     initCore("solrconfig.xml", "schema.xml");
+    // manually register some metrics in solr.jvm and solr.jetty - TestHarness doesn't init them
+    h.getCoreContainer().getMetricManager().counter(null, "solr.jvm", "foo");
+    h.getCoreContainer().getMetricManager().counter(null, "solr.jetty", "foo");
   }
 
   @Test
@@ -45,9 +49,6 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     NamedList values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
-    assertNotNull(values.get("solr.jetty"));
-    assertNotNull(values.get("solr.jvm"));
-    assertNotNull(values.get("solr.node"));
     NamedList nl = (NamedList) values.get("solr.core.collection1");
     assertNotNull(nl);
     Object o = nl.get("SEARCHER.new.errors");
@@ -124,10 +125,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
-    assertEquals(4, values.size());
-    assertEquals(0, ((NamedList)values.get("solr.jvm")).size());
-    assertEquals(0, ((NamedList)values.get("solr.jetty")).size());
-    assertEquals(0, ((NamedList)values.get("solr.core.collection1")).size());
+    assertEquals(1, values.size());
     assertEquals(11, ((NamedList)values.get("solr.node")).size());
     assertNotNull(values.get("solr.node"));
     values = (NamedList) values.get("solr.node");
@@ -136,21 +134,41 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertNotNull(values.get("CONTAINER.threadPool.coreLoadExecutor.completed"));
 
     resp = new SolrQueryResponse();
-    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "group", "jvm", "prefix", "CONTAINER.cores"), resp);
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "prefix", "CONTAINER.cores", "regex", "C.*thread.*completed"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
+    assertNotNull(values.get("solr.node"));
+    values = (NamedList) values.get("solr.node");
+    assertEquals(5, values.size());
+    assertNotNull(values.get("CONTAINER.threadPool.coreContainerWorkExecutor.completed"));
+    assertNotNull(values.get("CONTAINER.threadPool.coreLoadExecutor.completed"));
+
+    resp = new SolrQueryResponse();
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "prefix", "CACHE.core.fieldCache", "property", "entries_count", "compact", "true"), resp);
+    values = resp.getValues();
+    assertNotNull(values.get("metrics"));
+    values = (NamedList) values.get("metrics");
+    assertNotNull(values.get("solr.core.collection1"));
+    values = (NamedList) values.get("solr.core.collection1");
     assertEquals(1, values.size());
-    assertEquals(0, ((NamedList)values.get("solr.jvm")).size());
-    assertNull(values.get("solr.node"));
+    Map m = (Map)values.get("CACHE.core.fieldCache");
+    assertNotNull(m);
+    assertNotNull(m.get("entries_count"));
+
+    resp = new SolrQueryResponse();
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "group", "jvm", "prefix", "CONTAINER.cores"), resp);
+    values = resp.getValues();
+    assertNotNull(values.get("metrics"));
+    values = (NamedList) values.get("metrics");
+    assertEquals(0, values.size());
 
     resp = new SolrQueryResponse();
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "group", "node", "type", "timer", "prefix", "CONTAINER.cores"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     SimpleOrderedMap map = (SimpleOrderedMap) values.get("metrics");
-    assertEquals(1, map.size());
-    assertEquals(0, ((NamedList)map.get("solr.node")).size());
+    assertEquals(0, map.size());
   }
 
   @Test
@@ -168,4 +186,41 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertNotNull(o); // counter type
     assertTrue(o instanceof Number);
   }
+
+  @Test
+  public void testPropertyFilter() throws Exception {
+    MetricsHandler handler = new MetricsHandler(h.getCoreContainer());
+
+    SolrQueryResponse resp = new SolrQueryResponse();
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
+        MetricsHandler.COMPACT_PARAM, "true", "group", "core", "prefix", "CACHE.searcher"), resp);
+    NamedList values = resp.getValues();
+    assertNotNull(values.get("metrics"));
+    values = (NamedList) values.get("metrics");
+    NamedList nl = (NamedList) values.get("solr.core.collection1");
+    assertNotNull(nl);
+    assertTrue(nl.size() > 0);
+    nl.forEach((k, v) -> {
+      assertTrue(v instanceof Map);
+      Map map = (Map)v;
+      assertTrue(map.size() > 2);
+    });
+
+    resp = new SolrQueryResponse();
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
+        MetricsHandler.COMPACT_PARAM, "true", "group", "core", "prefix", "CACHE.searcher",
+        "property", "inserts", "property", "size"), resp);
+    values = resp.getValues();
+    values = (NamedList) values.get("metrics");
+    nl = (NamedList) values.get("solr.core.collection1");
+    assertNotNull(nl);
+    assertTrue(nl.size() > 0);
+    nl.forEach((k, v) -> {
+      assertTrue(v instanceof Map);
+      Map map = (Map)v;
+      assertEquals(2, map.size());
+      assertNotNull(map.get("inserts"));
+      assertNotNull(map.get("size"));
+    });
+  }
 }

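The filtering options exercised above compose in a single /admin/metrics request. A sketch (parameter values here are illustrative, not taken from the test):

    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
        "group", "core",                  // restrict to core registries
        "prefix", "CACHE.searcher",       // metric-name prefix filter
        "property", "size",               // keep only the named properties
        "compact", "true"), resp);        // flat values instead of one-entry wrappers
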
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java b/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java
index 7bf4939..c455b69 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java
@@ -17,7 +17,6 @@
 package org.apache.solr.handler.admin;
 
 import java.util.List;
-import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -68,7 +67,7 @@ public class StatsReloadRaceTest extends SolrTestCaseJ4 {
       boolean isCompleted;
       do {
         if (random.nextBoolean()) {
-          requestMbeans();
+          requestMetrics();
         } else {
           requestCoreStatus();
         }
@@ -106,22 +105,31 @@ public class StatsReloadRaceTest extends SolrTestCaseJ4 {
     return isCompleted;
   }
 
-  private void requestMbeans() throws Exception {
-    String stats = h.query(req(
-        CommonParams.QT, "/admin/mbeans",
-        "stats", "true"));
-
-    NamedList<NamedList<Object>> actualStats = SolrInfoMBeanHandler.fromXML(stats).get("CORE");
-    
-    for (Map.Entry<String, NamedList<Object>> tuple : actualStats) {
-      if (tuple.getKey().contains("earcher")) { // catches "searcher" and "Searcher@345345 blah"
-        NamedList<Object> searcherStats = tuple.getValue();
-        @SuppressWarnings("unchecked")
-        NamedList<Object> statsList = (NamedList<Object>)searcherStats.get("stats");
-        assertEquals("expect to have exactly one indexVersion at "+statsList, 1, statsList.getAll("indexVersion").size());
-        assertTrue(statsList.get("indexVersion") instanceof Long); 
+  private void requestMetrics() throws Exception {
+    SolrQueryResponse rsp = new SolrQueryResponse();
+    String registry = "solr.core." + h.coreName;
+    String key = "SEARCHER.searcher.indexVersion";
+    boolean found = false;
+    int count = 10;
+    while (!found && count-- > 0) {
+      h.getCoreContainer().getRequestHandler("/admin/metrics").handleRequest(
+          req("prefix", "SEARCHER", "registry", registry, "compact", "true"), rsp);
+
+      NamedList values = rsp.getValues();
+      NamedList metrics = (NamedList)values.get("metrics");
+      metrics = (NamedList)metrics.get(registry);
+      // this is not guaranteed to exist right away after core reload - there's a
+      // small window after core load before searcher metrics are registered,
+      // so we may have to check a few times
+      if (metrics.get(key) != null) {
+        found = true;
+        assertTrue(metrics.get(key) instanceof Long);
+        break;
+      } else {
+        Thread.sleep(1000);
       }
     }
+    assertTrue("Key " + key + " not found in registry " + registry, found);
   }
 
 }

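The same check can also be made without the request handler, by reading the core's registry directly; a sketch (the Gauge cast is an assumption based on the metric type asserted above):

    SolrMetricManager mm = h.getCoreContainer().getMetricManager();
    Metric m = mm.registry("solr.core." + h.coreName).getMetrics()
        .get("SEARCHER.searcher.indexVersion");
    if (m != null) {                      // may lag briefly after a core reload
      Object version = ((Gauge<?>) m).getValue();   // a Long
    }
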
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/handler/admin/SystemInfoHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/SystemInfoHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/SystemInfoHandlerTest.java
index c961a55..2e20dc8 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/SystemInfoHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/SystemInfoHandlerTest.java
@@ -20,8 +20,10 @@ import java.lang.management.ManagementFactory;
 import java.lang.management.OperatingSystemMXBean;
 import java.util.Arrays;
 
+import com.codahale.metrics.Gauge;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.common.util.SimpleOrderedMap;
+import org.apache.solr.util.stats.MetricUtils;
 
 
 public class SystemInfoHandlerTest extends LuceneTestCase {
@@ -36,9 +38,11 @@ public class SystemInfoHandlerTest extends LuceneTestCase {
     info.add( "version", os.getVersion() );
     info.add( "arch", os.getArch() );
 
-    // make another using addMXBeanProperties() 
+    // make another using MetricUtils.addMXBeanMetrics()
     SimpleOrderedMap<Object> info2 = new SimpleOrderedMap<>();
-    SystemInfoHandler.addMXBeanProperties( os, OperatingSystemMXBean.class, info2 );
+    MetricUtils.addMXBeanMetrics( os, OperatingSystemMXBean.class, null, (k, v) -> {
+      info2.add(k, ((Gauge)v).getValue());
+    } );
 
     // make sure they got the same thing
     for (String p : Arrays.asList("name", "version", "arch")) {

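The same helper generalizes to other platform MXBeans. For instance (a sketch; it assumes MetricUtils.addMXBeanMetrics accepts any MXBean interface the way it accepts OperatingSystemMXBean above):

    RuntimeMXBean rt = ManagementFactory.getRuntimeMXBean();
    SimpleOrderedMap<Object> rtInfo = new SimpleOrderedMap<>();
    MetricUtils.addMXBeanMetrics(rt, RuntimeMXBean.class, null,
        (name, metric) -> rtInfo.add(name, ((Gauge) metric).getValue()));
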
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java b/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java
index 7c4e663..d268a4e 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java
@@ -63,11 +63,6 @@ public class ResourceSharingTestComponent extends SearchComponent implements Sol
     return "ResourceSharingTestComponent";
   }
 
-  @Override
-  public String getSource() {
-    return null;
-  }
-
   @SuppressWarnings("unchecked")
   TestObject getTestObj() {
     return this.blob.get();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java b/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
index 6506f98..f0b58cd 100644
--- a/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
+++ b/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
@@ -20,7 +20,9 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
@@ -35,6 +37,7 @@ import org.apache.solr.common.params.HighlightParams;
 import org.apache.solr.handler.component.HighlightComponent;
 import org.apache.solr.handler.component.ResponseBuilder;
 import org.apache.solr.handler.component.SearchComponent;
+import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.search.DocSet;
@@ -868,6 +871,8 @@ public class HighlighterTest extends SolrTestCaseJ4 {
         "text", "test", // static not stored
         "foo_s", "test", // dynamic stored
         "foo_sI", "test", // dynamic not stored
+        "bar_s", "test", // dynamic stored
+        "bar_sI", "test", // dynamic not stored
         "weight", "1.0")); // stored but not text
     assertU(commit());
     assertU(optimize());
@@ -898,6 +903,21 @@ public class HighlighterTest extends SolrTestCaseJ4 {
     assertEquals("Expected to highlight on field \"foo_s\"", "foo_s",
         highlightFieldNames.get(0));
     request.close();
+
+    // SOLR-5127
+    args.put("hl.fl", (random().nextBoolean() ? "foo_*,bar_*" : "bar_*,foo_*"));
+    lrf = h.getRequestFactory("standard", 0, 10, args);
+    // hl.fl ordering need not be preserved in output
+    final Set<String> highlightedSetExpected = new HashSet<String>();
+    highlightedSetExpected.add("foo_s");
+    highlightedSetExpected.add("bar_s");
+    try (LocalSolrQueryRequest localRequest = lrf.makeRequest("test")) {
+      highlighter = HighlightComponent.getHighlighter(h.getCore());
+      final Set<String> highlightedSetActual = new HashSet<String>(
+          Arrays.asList(highlighter.getHighlightFields(null,
+              localRequest, new String[] {})));
+      assertEquals(highlightedSetExpected, highlightedSetActual);
+    }
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/highlight/TestPostingsSolrHighlighter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/highlight/TestPostingsSolrHighlighter.java b/solr/core/src/test/org/apache/solr/highlight/TestPostingsSolrHighlighter.java
index 3f25464..3862fa6 100644
--- a/solr/core/src/test/org/apache/solr/highlight/TestPostingsSolrHighlighter.java
+++ b/solr/core/src/test/org/apache/solr/highlight/TestPostingsSolrHighlighter.java
@@ -99,6 +99,19 @@ public class TestPostingsSolrHighlighter extends SolrTestCaseJ4 {
         "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text3']/str='crappier <em>document</em>'");
   }
   
+  // SOLR-5127
+  public void testMultipleFieldsViaWildcard() {
+    assertQ("highlighting text and text3*",
+        req("q", (random().nextBoolean() ? "text:document text3:document" : "text3:document text:document"),
+            "sort", "id asc", "hl", "true",
+            "hl.fl", (random().nextBoolean() ? "text,text3*" : "text3*,text")),
+        "count(//lst[@name='highlighting']/*)=2",
+        "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/str='<em>document</em> one'",
+        "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text3']/str='crappy <em>document</em>'",
+        "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text']/str='second <em>document</em>'",
+        "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text3']/str='crappier <em>document</em>'");
+  }
+
   public void testMisconfiguredField() {
     ignoreException("was indexed without offsets");
     try {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java b/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java
index 2f7a003..ad1ca03 100644
--- a/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java
+++ b/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java
@@ -179,6 +179,19 @@ public class TestUnifiedSolrHighlighter extends SolrTestCaseJ4 {
         "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text3']/str='crappier <em>document</em>'");
   }
 
+  // SOLR-5127
+  public void testMultipleFieldsViaWildcard() {
+    assertQ("highlighting text and text3*",
+        req("q", (random().nextBoolean() ? "text:document text3:document" : "text3:document text:document"),
+            "sort", "id asc", "hl", "true",
+            "hl.fl", (random().nextBoolean() ? "text,text3*" : "text3*,text")),
+        "count(//lst[@name='highlighting']/*)=2",
+        "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/str='<em>document</em> one'",
+        "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text3']/str='crappy <em>document</em>'",
+        "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text']/str='second <em>document</em>'",
+        "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text3']/str='crappier <em>document</em>'");
+  }
+
   public void testTags() {
     assertQ("different pre/post tags", 
         req("q", "text:document", "sort", "id asc", "hl", "true", "hl.tag.pre", "[", "hl.tag.post", "]"),

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/metrics/JvmMetricsTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/JvmMetricsTest.java b/solr/core/src/test/org/apache/solr/metrics/JvmMetricsTest.java
index 72adc68..020fe5e 100644
--- a/solr/core/src/test/org/apache/solr/metrics/JvmMetricsTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/JvmMetricsTest.java
@@ -16,13 +16,18 @@
  */
 package org.apache.solr.metrics;
 
-import javax.management.MBeanServer;
-import java.lang.management.ManagementFactory;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Arrays;
 import java.util.Map;
 
 import com.codahale.metrics.Gauge;
 import com.codahale.metrics.Metric;
+import org.apache.commons.io.FileUtils;
 import org.apache.solr.SolrJettyTestBase;
+import org.apache.solr.core.NodeConfig;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.core.SolrXmlConfig;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -31,26 +36,103 @@ import org.junit.Test;
  */
 public class JvmMetricsTest extends SolrJettyTestBase {
 
+  static final String[] STRING_OS_METRICS = {
+      "arch",
+      "name",
+      "version"
+  };
+  static final String[] NUMERIC_OS_METRICS = {
+      "availableProcessors",
+      "systemLoadAverage"
+  };
+
+  static final String[] BUFFER_METRICS = {
+      "direct.Count",
+      "direct.MemoryUsed",
+      "direct.TotalCapacity",
+      "mapped.Count",
+      "mapped.MemoryUsed",
+      "mapped.TotalCapacity"
+  };
+
   @BeforeClass
   public static void beforeTest() throws Exception {
     createJetty(legacyExampleCollection1SolrHome());
   }
 
   @Test
-  public void testOperatingSystemMetricsSet() throws Exception {
-    MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
-    OperatingSystemMetricSet set = new OperatingSystemMetricSet(mBeanServer);
+  public void testOperatingSystemMetricSet() throws Exception {
+    OperatingSystemMetricSet set = new OperatingSystemMetricSet();
     Map<String, Metric> metrics = set.getMetrics();
     assertTrue(metrics.size() > 0);
-    for (String metric : OperatingSystemMetricSet.METRICS) {
+    for (String metric : NUMERIC_OS_METRICS) {
       Gauge<?> gauge = (Gauge<?>)metrics.get(metric);
-      if (gauge == null || gauge.getValue() == null) { // some are optional depending on OS
-        continue;
-      }
+      assertNotNull(metric, gauge);
       double value = ((Number)gauge.getValue()).doubleValue();
       // SystemLoadAverage on Windows may be -1.0
       assertTrue("unexpected value of " + metric + ": " + value, value >= 0 || value == -1.0);
     }
+    for (String metric : STRING_OS_METRICS) {
+      Gauge<?> gauge = (Gauge<?>)metrics.get(metric);
+      assertNotNull(metric, gauge);
+      String value = (String)gauge.getValue();
+      assertNotNull(value);
+      assertFalse(value.isEmpty());
+    }
+  }
+
+  @Test
+  public void testAltBufferPoolMetricSet() throws Exception {
+    AltBufferPoolMetricSet set = new AltBufferPoolMetricSet();
+    Map<String, Metric> metrics = set.getMetrics();
+    assertTrue(metrics.size() > 0);
+    for (String name : BUFFER_METRICS) {
+      assertNotNull(name, metrics.get(name));
+      Object g = metrics.get(name);
+      assertTrue(g instanceof Gauge);
+      Object v = ((Gauge)g).getValue();
+      assertTrue(v instanceof Long);
+    }
+  }
+
+  @Test
+  public void testSystemProperties() throws Exception {
+    if (System.getProperty("basicauth") == null) {
+      // make sure it's set
+      System.setProperty("basicauth", "foo:bar");
+    }
+    SolrMetricManager metricManager = jetty.getCoreContainer().getMetricManager();
+    Map<String,Metric> metrics = metricManager.registry("solr.jvm").getMetrics();
+    MetricsMap map = (MetricsMap)metrics.get("system.properties");
+    assertNotNull(map);
+    Map<String,Object> values = map.getValue();
+    System.getProperties().forEach((k, v) -> {
+      if (NodeConfig.NodeConfigBuilder.DEFAULT_HIDDEN_SYS_PROPS.contains(k)) {
+        assertNull("hidden property " + k + " present!", values.get(k));
+      } else {
+        assertEquals(v, values.get(String.valueOf(k)));
+      }
+    });
+  }
+
+  @Test
+  public void testHiddenSysProps() throws Exception {
+    Path home = Paths.get(TEST_HOME());
+    SolrResourceLoader loader = new SolrResourceLoader(home);
+
+    // default config
+    String solrXml = FileUtils.readFileToString(Paths.get(home.toString(), "solr.xml").toFile(), "UTF-8");
+    NodeConfig config = SolrXmlConfig.fromString(loader, solrXml);
+    NodeConfig.NodeConfigBuilder.DEFAULT_HIDDEN_SYS_PROPS.forEach(s -> {
+      assertTrue(s, config.getHiddenSysProps().contains(s));
+    });
+
+    // custom config
+    solrXml = FileUtils.readFileToString(Paths.get(home.toString(), "solr-hiddensysprops.xml").toFile(), "UTF-8");
+    NodeConfig config2 = SolrXmlConfig.fromString(loader, solrXml);
+    Arrays.asList("foo", "bar", "baz").forEach(s -> {
+      assertTrue(s, config2.getHiddenSysProps().contains(s));
+    });
   }
 
   @Test
@@ -64,5 +146,6 @@ public class JvmMetricsTest extends SolrJettyTestBase {
     assertTrue(metrics.toString(), metrics.entrySet().stream().filter(e -> e.getKey().startsWith("gc.")).count() > 0);
     assertTrue(metrics.toString(), metrics.entrySet().stream().filter(e -> e.getKey().startsWith("memory.")).count() > 0);
     assertTrue(metrics.toString(), metrics.entrySet().stream().filter(e -> e.getKey().startsWith("threads.")).count() > 0);
+    assertTrue(metrics.toString(), metrics.entrySet().stream().filter(e -> e.getKey().startsWith("system.")).count() > 0);
   }
 }

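Reading the new system.properties gauge back out of the JVM registry follows the pattern in testSystemProperties above; a sketch (properties listed under hiddenSysProps in solr.xml are filtered out of the map):

    SolrMetricManager mm = jetty.getCoreContainer().getMetricManager();
    MetricsMap props = (MetricsMap) mm.registry("solr.jvm")
        .getMetrics().get("system.properties");
    Map<String, Object> values = props.getValue();  // filtered system properties
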
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java
index 6e8e1e5..3001e0c 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java
@@ -29,7 +29,7 @@ import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.core.PluginInfo;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.reporters.MockMetricReporter;
 import org.apache.solr.schema.FieldType;
 import org.junit.After;
@@ -46,7 +46,7 @@ public class SolrCoreMetricManagerTest extends SolrTestCaseJ4 {
   public void beforeTest() throws Exception {
     initCore("solrconfig-basic.xml", "schema.xml");
     coreMetricManager = h.getCore().getCoreMetricManager();
-    metricManager = h.getCore().getCoreDescriptor().getCoreContainer().getMetricManager();
+    metricManager = h.getCore().getCoreContainer().getMetricManager();
   }
 
   @After
@@ -61,7 +61,7 @@ public class SolrCoreMetricManagerTest extends SolrTestCaseJ4 {
     Random random = random();
 
     String scope = SolrMetricTestUtils.getRandomScope(random);
-    SolrInfoMBean.Category category = SolrMetricTestUtils.getRandomCategory(random);
+    SolrInfoBean.Category category = SolrMetricTestUtils.getRandomCategory(random);
     Map<String, Counter> metrics = SolrMetricTestUtils.getRandomMetrics(random);
     SolrMetricProducer producer = SolrMetricTestUtils.getProducerOf(metricManager, category, scope, metrics);
     try {
@@ -82,7 +82,7 @@ public class SolrCoreMetricManagerTest extends SolrTestCaseJ4 {
 
     Map<String, Counter> registered = new HashMap<>();
     String scope = SolrMetricTestUtils.getRandomScope(random, true);
-    SolrInfoMBean.Category category = SolrMetricTestUtils.getRandomCategory(random, true);
+    SolrInfoBean.Category category = SolrMetricTestUtils.getRandomCategory(random, true);
 
     int iterations = TestUtil.nextInt(random, 0, MAX_ITERATIONS);
     for (int i = 0; i < iterations; ++i) {

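The SolrInfoMBean-to-SolrInfoBean rename also flows through registry naming, which the next test exercises: registry names are dotted, group-prefixed strings, and already-prefixed names pass through unchanged. A sketch (core name is a placeholder):

    String name = SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, "techproducts");
    // name is "solr.core.techproducts"; passing it back in returns it unchanged
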
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
index 1c29c5e..d306119 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
@@ -29,7 +29,7 @@ import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.PluginInfo;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.metrics.reporters.MockMetricReporter;
 import org.junit.Test;
@@ -62,10 +62,10 @@ public class SolrMetricManagerTest extends SolrTestCaseJ4 {
     String toName = "to-" + TestUtil.randomSimpleString(r, 1, 10);
     // register test metrics
     for (Map.Entry<String, Counter> entry : metrics1.entrySet()) {
-      metricManager.register(fromName, entry.getValue(), false, entry.getKey(), "metrics1");
+      metricManager.register(null, fromName, entry.getValue(), false, entry.getKey(), "metrics1");
     }
     for (Map.Entry<String, Counter> entry : metrics2.entrySet()) {
-      metricManager.register(toName, entry.getValue(), false, entry.getKey(), "metrics2");
+      metricManager.register(null, toName, entry.getValue(), false, entry.getKey(), "metrics2");
     }
     assertEquals(metrics1.size(), metricManager.registry(fromName).getMetrics().size());
     assertEquals(metrics2.size(), metricManager.registry(toName).getMetrics().size());
@@ -125,13 +125,13 @@ public class SolrMetricManagerTest extends SolrTestCaseJ4 {
     String registryName = TestUtil.randomSimpleString(r, 1, 10);
 
     for (Map.Entry<String, Counter> entry : metrics.entrySet()) {
-      metricManager.register(registryName, entry.getValue(), false, entry.getKey(), "foo", "bar");
+      metricManager.register(null, registryName, entry.getValue(), false, entry.getKey(), "foo", "bar");
     }
     for (Map.Entry<String, Counter> entry : metrics.entrySet()) {
-      metricManager.register(registryName, entry.getValue(), false, entry.getKey(), "foo", "baz");
+      metricManager.register(null, registryName, entry.getValue(), false, entry.getKey(), "foo", "baz");
     }
     for (Map.Entry<String, Counter> entry : metrics.entrySet()) {
-      metricManager.register(registryName, entry.getValue(), false, entry.getKey(), "foo");
+      metricManager.register(null, registryName, entry.getValue(), false, entry.getKey(), "foo");
     }
 
     assertEquals(metrics.size() * 3, metricManager.registry(registryName).getMetrics().size());
@@ -163,10 +163,10 @@ public class SolrMetricManagerTest extends SolrTestCaseJ4 {
 
     String registryName = TestUtil.randomSimpleString(r, 1, 10);
 
-    metricManager.counter(registryName, "simple_counter", "foo", "bar");
-    metricManager.timer(registryName, "simple_timer", "foo", "bar");
-    metricManager.meter(registryName, "simple_meter", "foo", "bar");
-    metricManager.histogram(registryName, "simple_histogram", "foo", "bar");
+    metricManager.counter(null, registryName, "simple_counter", "foo", "bar");
+    metricManager.timer(null, registryName, "simple_timer", "foo", "bar");
+    metricManager.meter(null, registryName, "simple_meter", "foo", "bar");
+    metricManager.histogram(null, registryName, "simple_histogram", "foo", "bar");
     Map<String, Metric> metrics = metricManager.registry(registryName).getMetrics();
     assertEquals(4, metrics.size());
     for (Map.Entry<String, Metric> entry : metrics.entrySet()) {
@@ -180,13 +180,13 @@ public class SolrMetricManagerTest extends SolrTestCaseJ4 {
 
     String name = TestUtil.randomSimpleString(r, 1, 10);
 
-    String result = SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, name, "collection1");
+    String result = SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, name, "collection1");
     assertEquals("solr.core." + name + ".collection1", result);
     // try it with already prefixed name - group will be ignored
-    result = SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, result);
+    result = SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, result);
     assertEquals("solr.core." + name + ".collection1", result);
     // try it with already prefixed name but with additional segments
-    result = SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, result, "shard1", "replica1");
+    result = SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, result, "shard1", "replica1");
     assertEquals("solr.core." + name + ".collection1.shard1.replica1", result);
   }
 
@@ -206,18 +206,18 @@ public class SolrMetricManagerTest extends SolrTestCaseJ4 {
         createPluginInfo("core_foo", "core", null)
     };
     String tag = "xyz";
-    metricManager.loadReporters(plugins, loader, tag, SolrInfoMBean.Group.node);
+    metricManager.loadReporters(plugins, loader, tag, SolrInfoBean.Group.node);
     Map<String, SolrMetricReporter> reporters = metricManager.getReporters(
-        SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node));
+        SolrMetricManager.getRegistryName(SolrInfoBean.Group.node));
     assertEquals(4, reporters.size());
     assertTrue(reporters.containsKey("universal_foo@" + tag));
     assertTrue(reporters.containsKey("multigroup_foo@" + tag));
     assertTrue(reporters.containsKey("node_foo@" + tag));
     assertTrue(reporters.containsKey("multiregistry_foo@" + tag));
 
-    metricManager.loadReporters(plugins, loader, tag, SolrInfoMBean.Group.core, "collection1");
+    metricManager.loadReporters(plugins, loader, tag, SolrInfoBean.Group.core, "collection1");
     reporters = metricManager.getReporters(
-        SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, "collection1"));
+        SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, "collection1"));
     assertEquals(5, reporters.size());
     assertTrue(reporters.containsKey("universal_foo@" + tag));
     assertTrue(reporters.containsKey("multigroup_foo@" + tag));
@@ -225,26 +225,26 @@ public class SolrMetricManagerTest extends SolrTestCaseJ4 {
     assertTrue(reporters.containsKey("core_foo@" + tag));
     assertTrue(reporters.containsKey("multiregistry_foo@" + tag));
 
-    metricManager.loadReporters(plugins, loader, tag, SolrInfoMBean.Group.jvm);
+    metricManager.loadReporters(plugins, loader, tag, SolrInfoBean.Group.jvm);
     reporters = metricManager.getReporters(
-        SolrMetricManager.getRegistryName(SolrInfoMBean.Group.jvm));
+        SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm));
     assertEquals(2, reporters.size());
     assertTrue(reporters.containsKey("universal_foo@" + tag));
     assertTrue(reporters.containsKey("multigroup_foo@" + tag));
 
     metricManager.removeRegistry("solr.jvm");
     reporters = metricManager.getReporters(
-        SolrMetricManager.getRegistryName(SolrInfoMBean.Group.jvm));
+        SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm));
     assertEquals(0, reporters.size());
 
     metricManager.removeRegistry("solr.node");
     reporters = metricManager.getReporters(
-        SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node));
+        SolrMetricManager.getRegistryName(SolrInfoBean.Group.node));
     assertEquals(0, reporters.size());
 
     metricManager.removeRegistry("solr.core.collection1");
     reporters = metricManager.getReporters(
-        SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, "collection1"));
+        SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, "collection1"));
     assertEquals(0, reporters.size());
 
   }
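
For orientation, the hunks above capture the API change this test now exercises: SolrMetricManager's register/counter/timer/meter/histogram methods take the metric's owner as a new leading argument (passed as null throughout these standalone tests), and registry names are derived from SolrInfoBean.Group instead of SolrInfoMBean.Group. A minimal sketch of the new calling convention, not part of the commit, assuming only the signatures visible in the hunks above:

    import com.codahale.metrics.Counter;
    import org.apache.solr.core.SolrInfoBean;
    import org.apache.solr.metrics.SolrMetricManager;

    public class MetricApiSketch {
      public static void main(String[] args) {
        SolrMetricManager metricManager = new SolrMetricManager();
        // Group plus name resolves to "solr.core.collection1".
        String registry = SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, "collection1");
        // The new leading argument identifies the metric's owner; null suits ad-hoc metrics.
        metricManager.counter(null, registry, "simple_counter", "foo", "bar");
        metricManager.register(null, registry, new Counter(), false, "other_counter", "foo", "bar");
        // Everything lands in the named registry.
        System.out.println(metricManager.registry(registry).getMetrics().keySet());
      }
    }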

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/metrics/SolrMetricReporterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricReporterTest.java
index b275919..f3359cc 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricReporterTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricReporterTest.java
@@ -42,6 +42,7 @@ public class SolrMetricReporterTest extends LuceneTestCase {
     Map<String, Object> attrs = new HashMap<>();
     attrs.put(FieldType.CLASS_NAME, MockMetricReporter.class.getName());
     attrs.put(CoreAdminParams.NAME, TestUtil.randomUnicodeString(random));
+    attrs.put("enabled", random.nextBoolean());
 
     boolean shouldDefineConfigurable = random.nextBoolean();
     String configurable = TestUtil.randomUnicodeString(random);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/metrics/SolrMetricTestUtils.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricTestUtils.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricTestUtils.java
index 6bd6500..98fc9b1 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricTestUtils.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricTestUtils.java
@@ -23,12 +23,12 @@ import java.util.Random;
 
 import com.codahale.metrics.Counter;
 import org.apache.lucene.util.TestUtil;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 
 public final class SolrMetricTestUtils {
 
   private static final int                    MAX_ITERATIONS = 100;
-  private static final SolrInfoMBean.Category CATEGORIES[]   = SolrInfoMBean.Category.values();
+  private static final SolrInfoBean.Category CATEGORIES[]   = SolrInfoBean.Category.values();
 
   public static String getRandomScope(Random random) {
     return getRandomScope(random, random.nextBoolean());
@@ -38,11 +38,11 @@ public final class SolrMetricTestUtils {
     return shouldDefineScope ? TestUtil.randomSimpleString(random, 1, 10) : null; // must be simple string for JMX publishing
   }
 
-  public static SolrInfoMBean.Category getRandomCategory(Random random) {
+  public static SolrInfoBean.Category getRandomCategory(Random random) {
     return getRandomCategory(random, random.nextBoolean());
   }
 
-  public static SolrInfoMBean.Category getRandomCategory(Random random, boolean shouldDefineCategory) {
+  public static SolrInfoBean.Category getRandomCategory(Random random, boolean shouldDefineCategory) {
     return shouldDefineCategory ? CATEGORIES[TestUtil.nextInt(random, 0, CATEGORIES.length - 1)] : null;
   }
 
@@ -75,7 +75,7 @@ public final class SolrMetricTestUtils {
     return metrics;
   }
 
-  public static SolrMetricProducer getProducerOf(SolrMetricManager metricManager, SolrInfoMBean.Category category, String scope, Map<String, Counter> metrics) {
+  public static SolrMetricProducer getProducerOf(SolrMetricManager metricManager, SolrInfoBean.Category category, String scope, Map<String, Counter> metrics) {
     return new SolrMetricProducer() {
       @Override
       public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
@@ -86,7 +86,7 @@ public final class SolrMetricTestUtils {
           return;
         }
         for (Map.Entry<String, Counter> entry : metrics.entrySet()) {
-          manager.counter(registry, entry.getKey(), category.toString(), scope);
+          manager.counter(null, registry, entry.getKey(), category.toString(), scope);
         }
       }
 


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java b/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java
index 0034b13..72fc9ce 100644
--- a/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java
+++ b/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java
@@ -17,12 +17,13 @@
 package org.apache.solr.search;
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.solr.common.util.NamedList;
+import org.apache.lucene.util.TestUtil;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.util.ConcurrentLRUCache;
 import org.apache.solr.util.RTimer;
 
 import java.io.IOException;
-import java.io.Serializable;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
@@ -37,9 +38,14 @@ import java.util.concurrent.atomic.AtomicInteger;
  * @since solr 1.4
  */
 public class TestFastLRUCache extends LuceneTestCase {
-  
+  SolrMetricManager metricManager = new SolrMetricManager();
+  String registry = TestUtil.randomSimpleString(random(), 2, 10);
+  String scope = TestUtil.randomSimpleString(random(), 2, 10);
+
   public void testPercentageAutowarm() throws IOException {
     FastLRUCache<Object, Object> fastCache = new FastLRUCache<>();
+    fastCache.initializeMetrics(metricManager, registry, scope);
+    MetricsMap metrics = fastCache.getMetricsMap();
     Map<String, String> params = new HashMap<>();
     params.put("size", "100");
     params.put("initialSize", "10");
@@ -52,12 +58,14 @@ public class TestFastLRUCache extends LuceneTestCase {
     }
     assertEquals("25", fastCache.get(25));
     assertEquals(null, fastCache.get(110));
-    NamedList<Serializable> nl = fastCache.getStatistics();
+    Map<String,Object> nl = metrics.getValue();
     assertEquals(2L, nl.get("lookups"));
     assertEquals(1L, nl.get("hits"));
     assertEquals(101L, nl.get("inserts"));
     assertEquals(null, fastCache.get(1));  // first item put in should be the first out
     FastLRUCache<Object, Object> fastCacheNew = new FastLRUCache<>();
+    fastCacheNew.initializeMetrics(metricManager, registry, scope);
+    metrics = fastCacheNew.getMetricsMap();
     fastCacheNew.init(params, o, cr);
     fastCacheNew.warm(null, fastCache);
     fastCacheNew.setState(SolrCache.State.LIVE);
@@ -65,7 +73,7 @@ public class TestFastLRUCache extends LuceneTestCase {
     fastCacheNew.put(103, "103");
     assertEquals("90", fastCacheNew.get(90));
     assertEquals("50", fastCacheNew.get(50));
-    nl = fastCacheNew.getStatistics();
+    nl = metrics.getValue();
     assertEquals(2L, nl.get("lookups"));
     assertEquals(2L, nl.get("hits"));
     assertEquals(1L, nl.get("inserts"));
@@ -86,6 +94,7 @@ public class TestFastLRUCache extends LuceneTestCase {
   
   private void doTestPercentageAutowarm(int limit, int percentage, int[] hits, int[]misses) {
     FastLRUCache<Object, Object> fastCache = new FastLRUCache<>();
+    fastCache.initializeMetrics(metricManager, registry, scope);
     Map<String, String> params = new HashMap<>();
     params.put("size", String.valueOf(limit));
     params.put("initialSize", "10");
@@ -98,6 +107,7 @@ public class TestFastLRUCache extends LuceneTestCase {
     }
 
     FastLRUCache<Object, Object> fastCacheNew = new FastLRUCache<>();
+    fastCacheNew.initializeMetrics(metricManager, registry, scope);
     fastCacheNew.init(params, o, cr);
     fastCacheNew.warm(null, fastCache);
     fastCacheNew.setState(SolrCache.State.LIVE);
@@ -110,7 +120,7 @@ public class TestFastLRUCache extends LuceneTestCase {
     for(int miss:misses) {
       assertEquals("The value " + miss + " should NOT be on new cache", null, fastCacheNew.get(miss));
     }
-    NamedList<Serializable> nl = fastCacheNew.getStatistics();
+    Map<String,Object> nl = fastCacheNew.getMetricsMap().getValue();
     assertEquals(Long.valueOf(hits.length + misses.length), nl.get("lookups"));
     assertEquals(Long.valueOf(hits.length), nl.get("hits"));
     fastCacheNew.close();
@@ -118,6 +128,7 @@ public class TestFastLRUCache extends LuceneTestCase {
   
   public void testNoAutowarm() throws IOException {
     FastLRUCache<Object, Object> fastCache = new FastLRUCache<>();
+    fastCache.initializeMetrics(metricManager, registry, scope);
     Map<String, String> params = new HashMap<>();
     params.put("size", "100");
     params.put("initialSize", "10");
@@ -129,7 +140,7 @@ public class TestFastLRUCache extends LuceneTestCase {
     }
     assertEquals("25", fastCache.get(25));
     assertEquals(null, fastCache.get(110));
-    NamedList<Serializable> nl = fastCache.getStatistics();
+    Map<String,Object> nl = fastCache.getMetricsMap().getValue();
     assertEquals(2L, nl.get("lookups"));
     assertEquals(1L, nl.get("hits"));
     assertEquals(101L, nl.get("inserts"));
@@ -177,6 +188,7 @@ public class TestFastLRUCache extends LuceneTestCase {
   
   public void testSimple() throws IOException {
     FastLRUCache sc = new FastLRUCache();
+    sc.initializeMetrics(metricManager, registry, scope);
     Map l = new HashMap();
     l.put("size", "100");
     l.put("initialSize", "10");
@@ -189,7 +201,8 @@ public class TestFastLRUCache extends LuceneTestCase {
     }
     assertEquals("25", sc.get(25));
     assertEquals(null, sc.get(110));
-    NamedList nl = sc.getStatistics();
+    MetricsMap metrics = sc.getMetricsMap();
+    Map<String,Object> nl = metrics.getValue();
     assertEquals(2L, nl.get("lookups"));
     assertEquals(1L, nl.get("hits"));
     assertEquals(101L, nl.get("inserts"));
@@ -198,6 +211,7 @@ public class TestFastLRUCache extends LuceneTestCase {
 
 
     FastLRUCache scNew = new FastLRUCache();
+    scNew.initializeMetrics(metricManager, registry, scope);
     scNew.init(l, o, cr);
     scNew.warm(null, sc);
     scNew.setState(SolrCache.State.LIVE);
@@ -205,7 +219,7 @@ public class TestFastLRUCache extends LuceneTestCase {
     scNew.put(103, "103");
     assertEquals("90", scNew.get(90));
     assertEquals(null, scNew.get(50));
-    nl = scNew.getStatistics();
+    nl = scNew.getMetricsMap().getValue();
     assertEquals(2L, nl.get("lookups"));
     assertEquals(1L, nl.get("hits"));
     assertEquals(1L, nl.get("inserts"));
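
The pattern repeated throughout this file: per-cache statistics moved off getStatistics() (a NamedList) and onto a MetricsMap that each cache registers via initializeMetrics(). A minimal sketch of the new flow, not part of the commit; it assumes the same test classes used above, with NoOpRegenerator standing in for the test's regenerator:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.solr.metrics.SolrMetricManager;
    import org.apache.solr.search.FastLRUCache;
    import org.apache.solr.search.NoOpRegenerator;
    import org.apache.solr.search.SolrCache;

    public class CacheMetricsSketch {
      public static void main(String[] args) {
        SolrMetricManager metricManager = new SolrMetricManager();
        FastLRUCache<Object, Object> cache = new FastLRUCache<>();
        // Must run before stats are read: wires the cache's MetricsMap into the registry.
        cache.initializeMetrics(metricManager, "test.registry", "testScope");
        Map<String, String> params = new HashMap<>();
        params.put("size", "100");
        cache.init(params, null, new NoOpRegenerator());
        cache.setState(SolrCache.State.LIVE);
        cache.put(1, "one");
        cache.get(1);
        Map<String, Object> stats = cache.getMetricsMap().getValue(); // replaces getStatistics()
        System.out.println(stats.get("hits") + " hits / " + stats.get("lookups") + " lookups");
      }
    }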

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/TestIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestIndexSearcher.java b/solr/core/src/test/org/apache/solr/search/TestIndexSearcher.java
index 8fe3f97..c36066a 100644
--- a/solr/core/src/test/org/apache/solr/search/TestIndexSearcher.java
+++ b/solr/core/src/test/org/apache/solr/search/TestIndexSearcher.java
@@ -17,6 +17,7 @@
 package org.apache.solr.search;
 
 import java.io.IOException;
+import java.util.Date;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CountDownLatch;
@@ -25,6 +26,8 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Metric;
 import com.google.common.collect.ImmutableMap;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReaderContext;
@@ -137,13 +140,15 @@ public class TestIndexSearcher extends SolrTestCaseJ4 {
     int baseRefCount = r3.getRefCount();
     assertEquals(1, baseRefCount);
 
-    Object sr3SearcherRegAt = sr3.getSearcher().getStatistics().get("registeredAt");
+    Map<String, Metric> metrics = h.getCore().getCoreMetricManager().getRegistry().getMetrics();
+    Gauge<Date> g = (Gauge<Date>)metrics.get("SEARCHER.searcher.registeredAt");
+    Date sr3SearcherRegAt = g.getValue();
     assertU(commit()); // nothing has changed
     SolrQueryRequest sr4 = req("q","foo");
     assertSame("nothing changed, searcher should be the same",
                sr3.getSearcher(), sr4.getSearcher());
     assertEquals("nothing changed, searcher should not have been re-registered",
-                 sr3SearcherRegAt, sr4.getSearcher().getStatistics().get("registeredAt"));
+                 sr3SearcherRegAt, g.getValue());
     IndexReader r4 = sr4.getSearcher().getRawReader();
 
     // force an index change so the registered searcher won't be the one we are testing (and
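
The registeredAt assertions above show the replacement for searcher.getStatistics(): searcher-level values are now plain Dropwizard gauges in the core's metric registry. A hedged usage sketch of the lookup, not part of the commit; it assumes a SolrTestCaseJ4 test method where h is the initialized harness, as in this file:

    // Inside a SolrTestCaseJ4 test method, after initCore(...):
    Map<String, Metric> metrics = h.getCore().getCoreMetricManager().getRegistry().getMetrics();
    @SuppressWarnings("unchecked")
    Gauge<Date> registeredAt = (Gauge<Date>) metrics.get("SEARCHER.searcher.registeredAt");
    Date whenRegistered = registeredAt.getValue(); // re-read after commits to detect re-registration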

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/TestLFUCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestLFUCache.java b/solr/core/src/test/org/apache/solr/search/TestLFUCache.java
index d137875..8207522 100644
--- a/solr/core/src/test/org/apache/solr/search/TestLFUCache.java
+++ b/solr/core/src/test/org/apache/solr/search/TestLFUCache.java
@@ -16,9 +16,10 @@
  */
 package org.apache.solr.search;
 
+import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.util.ExecutorUtil;
-import org.apache.solr.common.util.NamedList;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.util.ConcurrentLFUCache;
 import org.apache.solr.util.DefaultSolrThreadFactory;
 import org.apache.solr.util.RefCounted;
@@ -32,6 +33,7 @@ import java.lang.invoke.MethodHandles;
 import java.util.HashMap;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Random;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
@@ -59,7 +61,7 @@ public class TestLFUCache extends SolrTestCaseJ4 {
       SolrIndexSearcher searcher = holder.get();
       LFUCache cacheDecayTrue = (LFUCache) searcher.getCache("lfuCacheDecayTrue");
       assertNotNull(cacheDecayTrue);
-      NamedList stats = cacheDecayTrue.getStatistics();
+      Map<String,Object> stats = cacheDecayTrue.getMetricsMap().getValue();
       assertTrue((Boolean) stats.get("timeDecay"));
       addCache(cacheDecayTrue, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
       for (int idx = 0; idx < 64; ++idx) {
@@ -70,7 +72,7 @@ public class TestLFUCache extends SolrTestCaseJ4 {
 
       LFUCache cacheDecayDefault = (LFUCache) searcher.getCache("lfuCacheDecayDefault");
       assertNotNull(cacheDecayDefault);
-      stats = cacheDecayDefault.getStatistics();
+      stats = cacheDecayDefault.getMetricsMap().getValue();
       assertTrue((Boolean) stats.get("timeDecay"));
       addCache(cacheDecayDefault, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
       assertCache(cacheDecayDefault, 1, 2, 3, 4, 5);
@@ -84,7 +86,7 @@ public class TestLFUCache extends SolrTestCaseJ4 {
 
       LFUCache cacheDecayFalse = (LFUCache) searcher.getCache("lfuCacheDecayFalse");
       assertNotNull(cacheDecayFalse);
-      stats = cacheDecayFalse.getStatistics();
+      stats = cacheDecayFalse.getMetricsMap().getValue();
       assertFalse((Boolean) stats.get("timeDecay"));
       addCache(cacheDecayFalse, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
       assertCache(cacheDecayFalse, 1, 2, 3, 4, 5);
@@ -131,9 +133,16 @@ public class TestLFUCache extends SolrTestCaseJ4 {
 
   @Test
   public void testSimple() throws IOException {
+    SolrMetricManager metricManager = new SolrMetricManager();
+    Random r = random();
+    String registry = TestUtil.randomSimpleString(r, 2, 10);
+    String scope = TestUtil.randomSimpleString(r, 2, 10);
     LFUCache lfuCache = new LFUCache();
     LFUCache newLFUCache = new LFUCache();
     LFUCache noWarmLFUCache = new LFUCache();
+    lfuCache.initializeMetrics(metricManager, registry, scope + ".lfuCache");
+    newLFUCache.initializeMetrics(metricManager, registry, scope + ".newLFUCache");
+    noWarmLFUCache.initializeMetrics(metricManager, registry, scope + ".noWarmLFUCache");
     try {
       Map params = new HashMap();
       params.put("size", "100");
@@ -148,7 +157,7 @@ public class TestLFUCache extends SolrTestCaseJ4 {
       assertEquals("15", lfuCache.get(15));
       assertEquals("75", lfuCache.get(75));
       assertEquals(null, lfuCache.get(110));
-      NamedList nl = lfuCache.getStatistics();
+      Map<String,Object> nl = lfuCache.getMetricsMap().getValue();
       assertEquals(3L, nl.get("lookups"));
       assertEquals(2L, nl.get("hits"));
       assertEquals(101L, nl.get("inserts"));
@@ -164,7 +173,7 @@ public class TestLFUCache extends SolrTestCaseJ4 {
       assertEquals("15", newLFUCache.get(15));
       assertEquals("75", newLFUCache.get(75));
       assertEquals(null, newLFUCache.get(50));
-      nl = newLFUCache.getStatistics();
+      nl = newLFUCache.getMetricsMap().getValue();
       assertEquals(3L, nl.get("lookups"));
       assertEquals(2L, nl.get("hits"));
       assertEquals(1L, nl.get("inserts"));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/TestLRUCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestLRUCache.java b/solr/core/src/test/org/apache/solr/search/TestLRUCache.java
index d2f74de..fa34911 100644
--- a/solr/core/src/test/org/apache/solr/search/TestLRUCache.java
+++ b/solr/core/src/test/org/apache/solr/search/TestLRUCache.java
@@ -17,21 +17,25 @@
 package org.apache.solr.search;
 
 import java.io.IOException;
-import java.io.Serializable;
 import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.RamUsageEstimator;
+import org.apache.lucene.util.TestUtil;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.NamedList;
+import org.apache.solr.metrics.SolrMetricManager;
 
 /**
  * Test for <code>org.apache.solr.search.LRUCache</code>
  */
 public class TestLRUCache extends LuceneTestCase {
 
+  SolrMetricManager metricManager = new SolrMetricManager();
+  String registry = TestUtil.randomSimpleString(random(), 2, 10);
+  String scope = TestUtil.randomSimpleString(random(), 2, 10);
+
   public void testFullAutowarm() throws IOException {
     LRUCache<Object, Object> lruCache = new LRUCache<>();
     Map<String, String> params = new HashMap<>();
@@ -97,6 +101,7 @@ public class TestLRUCache extends LuceneTestCase {
   @SuppressWarnings("unchecked")
   public void testNoAutowarm() throws IOException {
     LRUCache<Object, Object> lruCache = new LRUCache<>();
+    lruCache.initializeMetrics(metricManager, registry, scope);
     Map<String, String> params = new HashMap<>();
     params.put("size", "100");
     params.put("initialSize", "10");
@@ -108,7 +113,7 @@ public class TestLRUCache extends LuceneTestCase {
     }
     assertEquals("25", lruCache.get(25));
     assertEquals(null, lruCache.get(110));
-    NamedList<Serializable> nl = lruCache.getStatistics();
+    Map<String,Object> nl = lruCache.getMetricsMap().getValue();
     assertEquals(2L, nl.get("lookups"));
     assertEquals(1L, nl.get("hits"));
     assertEquals(101L, nl.get("inserts"));
@@ -126,6 +131,7 @@ public class TestLRUCache extends LuceneTestCase {
 
   public void testMaxRamSize() throws Exception {
     LRUCache<String, Accountable> accountableLRUCache = new LRUCache<>();
+    accountableLRUCache.initializeMetrics(metricManager, registry, scope);
     Map<String, String> params = new HashMap<>();
     params.put("size", "5");
     params.put("maxRamMB", "1");
@@ -149,7 +155,7 @@ public class TestLRUCache extends LuceneTestCase {
     });
     assertEquals(1, accountableLRUCache.size());
     assertEquals(baseSize + 512 * 1024 + LRUCache.LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY + LRUCache.DEFAULT_RAM_BYTES_USED, accountableLRUCache.ramBytesUsed());
-    NamedList<Serializable> nl = accountableLRUCache.getStatistics();
+    Map<String,Object> nl = accountableLRUCache.getMetricsMap().getValue();
     assertEquals(1L, nl.get("evictions"));
     assertEquals(1L, nl.get("evictionsRamUsage"));
     accountableLRUCache.put("3", new Accountable() {
@@ -158,7 +164,7 @@ public class TestLRUCache extends LuceneTestCase {
         return 1024;
       }
     });
-    nl = accountableLRUCache.getStatistics();
+    nl = accountableLRUCache.getMetricsMap().getValue();
     assertEquals(1L, nl.get("evictions"));
     assertEquals(1L, nl.get("evictionsRamUsage"));
     assertEquals(2L, accountableLRUCache.size());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java
index e4d6a5b..42d05e9 100644
--- a/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java
@@ -16,11 +16,12 @@
  */
 package org.apache.solr.search;
 
+import java.util.Map;
+
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.metrics.MetricsMap;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -376,8 +377,8 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
         "//result/doc[5]/float[@name='id'][.='2.0']"
     );
 
-    SolrInfoMBean info  = h.getCore().getInfoRegistry().get("queryResultCache");
-    NamedList stats = info.getStatistics();
+    MetricsMap metrics = (MetricsMap)h.getCore().getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.queryResultCache");
+    Map<String,Object> stats = metrics.getValue();
 
     long inserts = (Long) stats.get("inserts");
 
@@ -401,8 +402,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
     );
 
 
-    info  = h.getCore().getInfoRegistry().get("queryResultCache");
-    stats = info.getStatistics();
+    stats = metrics.getValue();
 
     long inserts1 = (Long) stats.get("inserts");
 
@@ -426,8 +426,7 @@ public class TestReRankQParserPlugin extends SolrTestCaseJ4 {
         "//result/doc[5]/float[@name='id'][.='1.0']"
     );
 
-    info  = h.getCore().getInfoRegistry().get("queryResultCache");
-    stats = info.getStatistics();
+    stats = metrics.getValue();
     long inserts2 = (Long) stats.get("inserts");
     //Last query was NOT added to the cache
     assertTrue(inserts1 == inserts2);
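
Same pattern for cache statistics in query tests: the MetricsMap is looked up once from the core registry by metric name and re-read between queries, replacing the getInfoRegistry()/getStatistics() round trip. A minimal usage sketch, not part of the commit, under the same assumptions (SolrTestCaseJ4 harness h; metric name as in the hunk above):

    // Inside a SolrTestCaseJ4 test method:
    MetricsMap qrcMetrics = (MetricsMap) h.getCore().getCoreMetricManager()
        .getRegistry().getMetrics().get("CACHE.searcher.queryResultCache");
    long insertsBefore = (Long) qrcMetrics.getValue().get("inserts");
    // ... issue a query that should hit the cache ...
    long insertsAfter = (Long) qrcMetrics.getValue().get("inserts");
    // A repeated identical query is served from the cache, so inserts stays flat.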

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/TestRecovery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestRecovery.java b/solr/core/src/test/org/apache/solr/search/TestRecovery.java
index 4b0c51c..b6ec6b1 100644
--- a/solr/core/src/test/org/apache/solr/search/TestRecovery.java
+++ b/solr/core/src/test/org/apache/solr/search/TestRecovery.java
@@ -17,6 +17,7 @@
 package org.apache.solr.search;
 
 
+import static org.apache.solr.search.TestRecovery.VersionProvider.*;
 import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
 
 import com.codahale.metrics.Gauge;
@@ -292,10 +293,15 @@ public class TestRecovery extends SolrTestCaseJ4 {
   @Test
   public void testLogReplayWithReorderedDBQ() throws Exception {
     testLogReplayWithReorderedDBQWrapper(() -> {
-          updateJ(jsonAdd(sdoc("id", "RDBQ1_1", "_version_", "1010")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
-          updateJ(jsonDelQ("id:RDBQ1_2"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", "-1017")); // This should've arrived after the 1015th update
-          updateJ(jsonAdd(sdoc("id", "RDBQ1_2", "_version_", "1015")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
-          updateJ(jsonAdd(sdoc("id", "RDBQ1_3", "_version_", "1020")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          String v1010 = getNextVersion();
+          String v1015 = getNextVersion();
+          String v1017_del = "-" + getNextVersion();
+          String v1020 = getNextVersion();
+          
+          updateJ(jsonAdd(sdoc("id", "RDBQ1_1", "_version_", v1010)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          updateJ(jsonDelQ("id:RDBQ1_2"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v1017_del)); // This should've arrived after the ver2 update
+          updateJ(jsonAdd(sdoc("id", "RDBQ1_2", "_version_", v1015)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          updateJ(jsonAdd(sdoc("id", "RDBQ1_3", "_version_", v1020)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
         },
         () -> assertJQ(req("q", "*:*"), "/response/numFound==2")
     );
@@ -304,16 +310,22 @@ public class TestRecovery extends SolrTestCaseJ4 {
   @Test
   public void testLogReplayWithReorderedDBQByAsterixAndChildDocs() throws Exception {
     testLogReplayWithReorderedDBQWrapper(() -> {
+          String v1010 = getNextVersion();
+          String v1012 = getNextVersion();
+          String v1017_del = "-" + getNextVersion();
+          String v1018 = getNextVersion();
+          String v1020 = getNextVersion();
+          
           // 1010 - will be deleted
-          updateJ(jsonAdd(sdocWithChildren("RDBQ2_1", "1010")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          updateJ(jsonAdd(sdocWithChildren("RDBQ2_1", v1010)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
           // 1018 - should be kept, including child docs
-          updateJ(jsonAdd(sdocWithChildren("RDBQ2_2", "1018")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          updateJ(jsonAdd(sdocWithChildren("RDBQ2_2", v1018)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
           // 1017 - delete should affect only 1010
-          updateJ(jsonDelQ("_root_:RDBQ2_1 _root_:RDBQ2_2 id:RDBQ2_3 _root_:RDBQ2_4"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", "-1017")); // This should've arrived after the 1015th update
+          updateJ(jsonDelQ("_root_:RDBQ2_1 _root_:RDBQ2_2 id:RDBQ2_3 _root_:RDBQ2_4"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v1017_del)); // This should've arrived after the ver2 update
           // 1012 - will be deleted
-          updateJ(jsonAdd(sdoc("id", "RDBQ2_3", "_version_", "1012")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          updateJ(jsonAdd(sdoc("id", "RDBQ2_3", "_version_", v1012)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
           // 1020 - should be untouched
-          updateJ(jsonAdd(sdocWithChildren("RDBQ2_4", "1020")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          updateJ(jsonAdd(sdocWithChildren("RDBQ2_4", v1020)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
         },
         () -> assertJQ(req("q", "*:*"), "/response/numFound==6")
     );
@@ -322,16 +334,22 @@ public class TestRecovery extends SolrTestCaseJ4 {
   @Test
   public void testLogReplayWithReorderedDBQByIdAndChildDocs() throws Exception {
     testLogReplayWithReorderedDBQWrapper(() -> {
+          String v1010 = getNextVersion();
+          String v1012 = getNextVersion();
+          String v1017_del = "-" + getNextVersion();
+          String v1018 = getNextVersion();
+          String v1020 = getNextVersion();
+      
           // 1010 - will be deleted
-          updateJ(jsonAdd(sdocWithChildren("RDBQ3_1", "1010")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          updateJ(jsonAdd(sdocWithChildren("RDBQ3_1", v1010)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
           // 1018 - should be kept, including child docs
-          updateJ(jsonAdd(sdocWithChildren("RDBQ3_2", "1018")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          updateJ(jsonAdd(sdocWithChildren("RDBQ3_2", v1018)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
           // 1017 - delete should affect only 1010
-          updateJ(jsonDelQ("id:RDBQ3_1 id:RDBQ3_2 id:RDBQ3_3 id:RDBQ3_4"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", "-1017")); // This should've arrived after the 1015th update
+          updateJ(jsonDelQ("id:RDBQ3_1 id:RDBQ3_2 id:RDBQ3_3 id:RDBQ3_4"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v1017_del)); // This should've arrived after the ver2 update
           // 1012 - will be deleted
-          updateJ(jsonAdd(sdoc("id", "RDBQ3_3", "_version_", "1012")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          updateJ(jsonAdd(sdoc("id", "RDBQ3_3", "_version_", v1012)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
           // 1020 - should be untouched
-          updateJ(jsonAdd(sdocWithChildren("RDBQ3_4", "1020")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          updateJ(jsonAdd(sdocWithChildren("RDBQ3_4", v1020)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
         },
         () -> assertJQ(req("q", "*:*"), "/response/numFound==8") // RDBQ3_2, RDBQ3_4 and 6 children docs (delete by id does not delete child docs)
     );
@@ -340,10 +358,13 @@ public class TestRecovery extends SolrTestCaseJ4 {
   @Test
   public void testLogReplayWithReorderedDBQInsertingChildnodes() throws Exception {
     testLogReplayWithReorderedDBQWrapper(() -> {
-          updateJ(jsonDelQ("id:RDBQ4_2"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", "-1017"));
+          String v1013 = getNextVersion();
+          String v1017_del = "-" + getNextVersion();
+          
+          updateJ(jsonDelQ("id:RDBQ4_2"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v1017_del));
           // test doc: B1
           // 1013 - will be inserted with 3 children
-          updateJ(jsonAdd(sdocWithChildren("RDBQ4_1", "1013", 3)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          updateJ(jsonAdd(sdocWithChildren("RDBQ4_1", v1013, 3)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
         },
         () -> assertJQ(req("q", "*:*"), "/response/numFound==4") // RDBQ4_1 and RDBQ4_2, plus 2x 3 children
     );
@@ -353,17 +374,23 @@ public class TestRecovery extends SolrTestCaseJ4 {
   @Test
   public void testLogReplayWithReorderedDBQUpdateWithDifferentChildCount() throws Exception {
     testLogReplayWithReorderedDBQWrapper(() -> {
+          String v1011 = getNextVersion();
+          String v1012 = getNextVersion();
+          String v1013 = getNextVersion();
+          String v1018 = getNextVersion();
+          String v1019_del = "-" + getNextVersion();
+      
           // control
-          // 1013 - will be inserted with 3 children
-          updateJ(jsonAdd(sdocWithChildren("RDBQ5_1", "1011", 2)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          // 1011 - inserted with 2 children; superseded by the 3-child update at 1012
+          updateJ(jsonAdd(sdocWithChildren("RDBQ5_1", v1011, 2)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          // 1012 - this should be the final
+          updateJ(jsonAdd(sdocWithChildren("RDBQ5_1", v1012, 3)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+
+          // 1013 - inserted with 2 children; superseded by the 3-child update at 1018
+          updateJ(jsonAdd(sdocWithChildren("RDBQ5_2", v1013, 2)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          updateJ(jsonDelQ("id:RDBQ5_3"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v1019_del));
           // 1018 - this should be the final
-          updateJ(jsonAdd(sdocWithChildren("RDBQ5_1", "1012", 3)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
-
-          // 1013 - will be inserted with 3 children
-          updateJ(jsonAdd(sdocWithChildren("RDBQ5_2", "1013", 2)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
-          updateJ(jsonDelQ("id:RDBQ5_3"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", "-1019"));
-          // 1018 - this should be the final
-          updateJ(jsonAdd(sdocWithChildren("RDBQ5_2", "1018", 3)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
+          updateJ(jsonAdd(sdocWithChildren("RDBQ5_2", v1018, 3)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
         },
         () -> assertJQ(req("q", "*:*"), "/response/numFound==8") // RDBQ5_1+3children+RDBQ5_2+3children
     );
@@ -469,23 +496,43 @@ public class TestRecovery extends SolrTestCaseJ4 {
       int initialOps = bufferedOps.getValue();
       Meter applyingBuffered = (Meter)metrics.get("TLOG.applyingBuffered.ops");
       long initialApplyingOps = applyingBuffered.getCount();
+      
+      String v3 = getNextVersion();
+      String v940_del = "-" + getNextVersion();
+      String v950_del = "-" + getNextVersion();
+      String v1010 = getNextVersion();
+      String v1015 = getNextVersion();
+      String v1017_del = "-" + getNextVersion();
+      String v1020 = getNextVersion();
+      String v1030 = getNextVersion();
+      String v1040 = getNextVersion();
+      String v1050 = getNextVersion();
+      String v1060 = getNextVersion();
+      String v1070 = getNextVersion();
+      String v1080 = getNextVersion();
+      String v2010_del = "-" + getNextVersion();
+      String v2060_del = "-" + getNextVersion();
+      String v3000_del = "-" + getNextVersion();
+
+      String versionListFirstCheck = String.join(",", v2010_del, v1030, v1020, v1017_del, v1015, v1010);
+      String versionListSecondCheck = String.join(",", v3000_del, v1080, v1050, v1060, v940_del, v1040, v3, v2010_del, v1030, v1020, v1017_del, v1015, v1010);
 
       // simulate updates from a leader
-      updateJ(jsonAdd(sdoc("id","B1", "_version_","1010")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","B11", "_version_","1015")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonDelQ("id:B1 id:B11 id:B2 id:B3"), params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_","-1017"));
-      updateJ(jsonAdd(sdoc("id","B2", "_version_","1020")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","B3", "_version_","1030")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      deleteAndGetVersion("B1", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_","-2010"));
+      updateJ(jsonAdd(sdoc("id","B1", "_version_",v1010)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","B11", "_version_",v1015)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonDelQ("id:B1 id:B11 id:B2 id:B3"), params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",v1017_del));
+      updateJ(jsonAdd(sdoc("id","B2", "_version_",v1020)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","B3", "_version_",v1030)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      deleteAndGetVersion("B1", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",v2010_del));
 
       assertJQ(req("qt","/get", "getVersions","6")
-          ,"=={'versions':[-2010,1030,1020,-1017,1015,1010]}"
+          ,"=={'versions':["+versionListFirstCheck+"]}"
       );
 
       assertU(commit());
 
       assertJQ(req("qt","/get", "getVersions","6")
-          ,"=={'versions':[-2010,1030,1020,-1017,1015,1010]}"
+          ,"=={'versions':["+versionListFirstCheck+"]}"
       );
 
       // updates should be buffered, so we should not see any results yet.
@@ -515,7 +562,7 @@ public class TestRecovery extends SolrTestCaseJ4 {
       assertEquals(6L, applyingBuffered.getCount() - initialApplyingOps);
 
       assertJQ(req("qt","/get", "getVersions","6")
-          ,"=={'versions':[-2010,1030,1020,-1017,1015,1010]}"
+          ,"=={'versions':["+versionListFirstCheck+"]}"
       );
 
 
@@ -528,24 +575,24 @@ public class TestRecovery extends SolrTestCaseJ4 {
       assertEquals(UpdateLog.State.BUFFERING, ulog.getState());
 
       Long ver = getVer(req("qt","/get", "id","B3"));
-      assertEquals(1030L, ver.longValue());
+      assertEquals(Long.valueOf(v1030), ver);
 
       // add a reordered doc that shouldn't overwrite one in the index
-      updateJ(jsonAdd(sdoc("id","B3", "_version_","3")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","B3", "_version_",v3)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       // reorder two buffered updates
-      updateJ(jsonAdd(sdoc("id","B4", "_version_","1040")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      deleteAndGetVersion("B4", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_","-940"));   // this update should not take affect
-      updateJ(jsonAdd(sdoc("id","B6", "_version_","1060")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","B5", "_version_","1050")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","B8", "_version_","1080")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","B4", "_version_",v1040)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      deleteAndGetVersion("B4", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",v940_del));   // this update should not take effect
+      updateJ(jsonAdd(sdoc("id","B6", "_version_",v1060)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","B5", "_version_",v1050)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","B8", "_version_",v1080)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       // test that delete by query is at least buffered along with everything else so it will delete the
       // currently buffered id:8 (even if it doesn't currently support versioning)
-      updateJ("{\"delete\": { \"query\":\"id:B2 OR id:B8\" }}", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_","-3000"));
+      updateJ("{\"delete\": { \"query\":\"id:B2 OR id:B8\" }}", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",v3000_del));
 
       assertJQ(req("qt","/get", "getVersions","13")
-          ,"=={'versions':[-3000,1080,1050,1060,-940,1040,3,-2010,1030,1020,-1017,1015,1010]}"  // the "3" appears because versions aren't checked while buffering
+          ,"=={'versions':[" + versionListSecondCheck + "]}"  // the "3" appears because versions aren't checked while buffering
       );
 
       logReplay.drainPermits();
@@ -557,22 +604,22 @@ public class TestRecovery extends SolrTestCaseJ4 {
       logReplay.release(1);
 
       // now add another update
-      updateJ(jsonAdd(sdoc("id","B7", "_version_","1070")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","B7", "_version_",v1070)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       // a reordered update that should be dropped
-      deleteAndGetVersion("B5", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_","-950"));
+      deleteAndGetVersion("B5", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",v950_del));
 
-      deleteAndGetVersion("B6", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_","-2060"));
+      deleteAndGetVersion("B6", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",v2060_del));
 
       logReplay.release(1000);
       UpdateLog.RecoveryInfo recInfo = rinfoFuture.get();
 
       assertJQ(req("q", "*:*", "sort","id asc", "fl","id,_version_")
           , "/response/docs==["
-                           + "{'id':'B3','_version_':1030}"
-                           + ",{'id':'B4','_version_':1040}"
-                           + ",{'id':'B5','_version_':1050}"
-                           + ",{'id':'B7','_version_':1070}"
+                           + "{'id':'B3','_version_':"+v1030+"}"
+                           + ",{'id':'B4','_version_':"+v1040+"}"
+                           + ",{'id':'B5','_version_':"+v1050+"}"
+                           + ",{'id':'B7','_version_':"+v1070+"}"
                            +"]"
       );
 
@@ -615,6 +662,22 @@ public class TestRecovery extends SolrTestCaseJ4 {
     UpdateLog ulog = uhandler.getUpdateLog();
 
     try {
+      String v101 = getNextVersion();
+      String v102 = getNextVersion();
+      String v103 = getNextVersion();
+      String v104 = getNextVersion();
+      String v105 = getNextVersion();
+      String v200 = getNextVersion();
+      String v201 = getNextVersion();
+      String v203 = getNextVersion();
+      String v204 = getNextVersion();
+      String v205 = getNextVersion();
+      String v206 = getNextVersion();
+      String v301 = getNextVersion();
+      String v302 = getNextVersion();
+      String v998 = getNextVersion();
+      String v999 = getNextVersion();
+      
       clearIndex();
       assertU(commit());
 
@@ -629,14 +692,14 @@ public class TestRecovery extends SolrTestCaseJ4 {
       assertEquals(UpdateLog.State.BUFFERING, ulog.getState());
 
       // simulate updates from a leader
-      updateJ(jsonAdd(sdoc("id","C1", "_version_","101")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","C2", "_version_","102")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","C3", "_version_","103")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C1", "_version_",v101)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C2", "_version_",v102)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C3", "_version_",v103)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       assertTrue(ulog.dropBufferedUpdates());
       ulog.bufferUpdates();
-      updateJ(jsonAdd(sdoc("id", "C4", "_version_","104")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id", "C5", "_version_","105")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id", "C4", "_version_",v104)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id", "C5", "_version_",v105)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       logReplay.release(1000);
       rinfoFuture = ulog.applyBufferedUpdates();
@@ -644,21 +707,21 @@ public class TestRecovery extends SolrTestCaseJ4 {
       assertEquals(2, rinfo.adds);
 
       assertJQ(req("qt","/get", "getVersions","2")
-          ,"=={'versions':[105,104]}"
+          ,"=={'versions':["+v105+","+v104+"]}"
       );
 
       // this time add some docs first before buffering starts (so tlog won't be at pos 0)
-      updateJ(jsonAdd(sdoc("id","C100", "_version_","200")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","C101", "_version_","201")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C100", "_version_",v200)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C101", "_version_",v201)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       ulog.bufferUpdates();
-      updateJ(jsonAdd(sdoc("id","C103", "_version_","203")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","C104", "_version_","204")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C103", "_version_",v203)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C104", "_version_",v204)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       assertTrue(ulog.dropBufferedUpdates());
       ulog.bufferUpdates();
-      updateJ(jsonAdd(sdoc("id","C105", "_version_","205")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","C106", "_version_","206")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C105", "_version_",v205)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C106", "_version_",v206)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       rinfoFuture = ulog.applyBufferedUpdates();
       rinfo = rinfoFuture.get();
@@ -666,45 +729,45 @@ public class TestRecovery extends SolrTestCaseJ4 {
 
       assertJQ(req("q", "*:*", "sort","_version_ asc", "fl","id,_version_")
           , "/response/docs==["
-          + "{'id':'C4','_version_':104}"
-          + ",{'id':'C5','_version_':105}"
-          + ",{'id':'C100','_version_':200}"
-          + ",{'id':'C101','_version_':201}"
-          + ",{'id':'C105','_version_':205}"
-          + ",{'id':'C106','_version_':206}"
+          + "{'id':'C4','_version_':"+v104+"}"
+          + ",{'id':'C5','_version_':"+v105+"}"
+          + ",{'id':'C100','_version_':"+v200+"}"
+          + ",{'id':'C101','_version_':"+v201+"}"
+          + ",{'id':'C105','_version_':"+v205+"}"
+          + ",{'id':'C106','_version_':"+v206+"}"
           +"]"
       );
 
       assertJQ(req("qt","/get", "getVersions","6")
-          ,"=={'versions':[206,205,201,200,105,104]}"
+          ,"=={'versions':["+String.join(",",v206,v205,v201,v200,v105,v104)+"]}"
       );
 
       ulog.bufferUpdates();
       assertEquals(UpdateLog.State.BUFFERING, ulog.getState());
-      updateJ(jsonAdd(sdoc("id","C301", "_version_","998")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","C302", "_version_","999")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C301", "_version_",v998)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C302", "_version_",v999)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
       assertTrue(ulog.dropBufferedUpdates());
 
       // make sure we can overwrite with a lower version
       // TODO: is this functionality needed?
-      updateJ(jsonAdd(sdoc("id","C301", "_version_","301")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","C302", "_version_","302")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C301", "_version_",v301)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C302", "_version_",v302)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       assertU(commit());
 
       assertJQ(req("qt","/get", "getVersions","2")
-          ,"=={'versions':[302,301]}"
+          ,"=={'versions':["+v302+","+v301+"]}"
       );
 
       assertJQ(req("q", "*:*", "sort","_version_ desc", "fl","id,_version_", "rows","2")
           , "/response/docs==["
-          + "{'id':'C302','_version_':302}"
-          + ",{'id':'C301','_version_':301}"
+          + "{'id':'C302','_version_':"+v302+"}"
+          + ",{'id':'C301','_version_':"+v301+"}"
           +"]"
       );
 
 
-      updateJ(jsonAdd(sdoc("id","C2", "_version_","302")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","C2", "_version_",v302)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
 
 
@@ -744,6 +807,18 @@ public class TestRecovery extends SolrTestCaseJ4 {
     Future<UpdateLog.RecoveryInfo> rinfoFuture;
 
     try {
+      String v101 = getNextVersion();
+      String v102 = getNextVersion();
+      String v103 = getNextVersion();
+      String v104 = getNextVersion();
+      String v105 = getNextVersion();
+      String v200 = getNextVersion();
+      String v201 = getNextVersion();
+      String v203 = getNextVersion();
+      String v204 = getNextVersion();
+      String v205 = getNextVersion();
+      String v206 = getNextVersion();
+      
       clearIndex();
       assertU(commit());
       assertEquals(UpdateLog.State.ACTIVE, ulog.getState());
@@ -752,16 +827,16 @@ public class TestRecovery extends SolrTestCaseJ4 {
       assertEquals(UpdateLog.State.BUFFERING, ulog.getState());
 
       // simulate updates from a leader
-      updateJ(jsonAdd(sdoc("id","c1", "_version_","101")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","c2", "_version_","102")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","c3", "_version_","103")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","c1", "_version_",v101)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","c2", "_version_",v102)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","c3", "_version_",v103)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       // call bufferUpdates again (this currently happens when recovery fails)... we should get a new starting point
       ulog.bufferUpdates();
       assertEquals(UpdateLog.State.BUFFERING, ulog.getState());
 
-      updateJ(jsonAdd(sdoc("id", "c4", "_version_","104")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id", "c5", "_version_","105")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id", "c4", "_version_",v104)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id", "c5", "_version_",v105)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       logReplay.release(1000);
       rinfoFuture = ulog.applyBufferedUpdates();
@@ -769,21 +844,21 @@ public class TestRecovery extends SolrTestCaseJ4 {
       assertEquals(2, rinfo.adds);
 
       assertJQ(req("qt","/get", "getVersions","2")
-          ,"=={'versions':[105,104]}"
+          ,"=={'versions':["+v105+","+v104+"]}"
       );
 
       // this time add some docs first before buffering starts (so tlog won't be at pos 0)
-      updateJ(jsonAdd(sdoc("id","c100", "_version_","200")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","c101", "_version_","201")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","c100", "_version_",v200)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","c101", "_version_",v201)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       ulog.bufferUpdates();
-      updateJ(jsonAdd(sdoc("id","c103", "_version_","203")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","c104", "_version_","204")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","c103", "_version_",v203)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","c104", "_version_",v204)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       // call bufferUpdates again (this currently happens when recovery fails)... we should get a new starting point
       ulog.bufferUpdates();
-      updateJ(jsonAdd(sdoc("id","c105", "_version_","205")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","c106", "_version_","206")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","c105", "_version_",v205)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","c106", "_version_",v206)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       rinfoFuture = ulog.applyBufferedUpdates();
       rinfo = rinfoFuture.get();
@@ -791,19 +866,19 @@ public class TestRecovery extends SolrTestCaseJ4 {
 
       assertJQ(req("q", "*:*", "sort","_version_ asc", "fl","id,_version_")
           , "/response/docs==["
-              + "{'id':'c4','_version_':104}"
-              + ",{'id':'c5','_version_':105}"
-              + ",{'id':'c100','_version_':200}"
-              + ",{'id':'c101','_version_':201}"
-              + ",{'id':'c105','_version_':205}"
-              + ",{'id':'c106','_version_':206}"
-              +"]"
+              + "{'id':'c4','_version_':"+v104+"}"
+              + ",{'id':'c5','_version_':"+v105+"}"
+              + ",{'id':'c100','_version_':"+v200+"}"
+              + ",{'id':'c101','_version_':"+v201+"}"
+              + ",{'id':'c105','_version_':"+v205+"}"
+              + ",{'id':'c106','_version_':"+v206+"}"
++""              +"]"
       );
 
       // The updates that were buffered (but never applied) still appear in recent versions!
       // This is good for some uses, but may not be good for others.
       assertJQ(req("qt","/get", "getVersions","11")
-          ,"=={'versions':[206,205,204,203,201,200,105,104,103,102,101]}"
+          ,"=={'versions':["+String.join(",",v206,v205,v204,v203,v201,v200,v105,v104,v103,v102,v101)+"]}"
       );
 
       assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); // leave each test method in a good state
@@ -864,6 +939,14 @@ public class TestRecovery extends SolrTestCaseJ4 {
     UpdateLog ulog = uhandler.getUpdateLog();
 
     try {
+      String v101 = getNextVersion();
+      String v102 = getNextVersion();
+      String v103 = getNextVersion();
+      String v114 = getNextVersion();
+      String v115 = getNextVersion();
+      String v116 = getNextVersion();
+      String v117 = getNextVersion();
+      
       clearIndex();
       assertU(commit());
 
@@ -871,9 +954,9 @@ public class TestRecovery extends SolrTestCaseJ4 {
       ulog.bufferUpdates();
 
       // simulate updates from a leader
-      updateJ(jsonAdd(sdoc("id","Q1", "_version_","101")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","Q2", "_version_","102")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","Q3", "_version_","103")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","Q1", "_version_",v101)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","Q2", "_version_",v102)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","Q3", "_version_",v103)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
       assertEquals(UpdateLog.State.BUFFERING, ulog.getState());
 
       req.close();
@@ -903,9 +986,9 @@ public class TestRecovery extends SolrTestCaseJ4 {
       assertTrue((ulog.getStartingOperation() & UpdateLog.FLAG_GAP) != 0);
 
       // now do some normal non-buffered adds
-      updateJ(jsonAdd(sdoc("id","Q4", "_version_","114")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","Q5", "_version_","115")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","Q6", "_version_","116")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","Q4", "_version_",v114)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","Q5", "_version_",v115)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","Q6", "_version_",v116)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
       assertU(commit());
 
       req.close();
@@ -921,7 +1004,7 @@ public class TestRecovery extends SolrTestCaseJ4 {
       ulog.bufferUpdates();
       // simulate receiving no updates
       ulog.applyBufferedUpdates();
-      updateJ(jsonAdd(sdoc("id","Q7", "_version_","117")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); // do another add to make sure flags are back to normal
+      updateJ(jsonAdd(sdoc("id","Q7", "_version_",v117)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); // do another add to make sure flags are back to normal
 
       req.close();
       h.close();
@@ -950,26 +1033,29 @@ public class TestRecovery extends SolrTestCaseJ4 {
   // make sure that on a restart, versions don't start too low
   @Test
   public void testVersionsOnRestart() throws Exception {
+    String v1 = getNextVersion();
+    String v2 = getNextVersion();
+    
     clearIndex();
     assertU(commit());
 
-    assertU(adoc("id","D1", "val_i","1"));
-    assertU(adoc("id","D2", "val_i","1"));
+    assertU(adoc("id","D1", "val_i",v1));
+    assertU(adoc("id","D2", "val_i",v1));
     assertU(commit());
-    long v1 = getVer(req("q","id:D1"));
-    long v1a = getVer(req("q","id:D2"));
+    long D1Version1 = getVer(req("q","id:D1"));
+    long D2Version1 = getVer(req("q","id:D2"));
 
     h.close();
     createCore();
 
-    assertU(adoc("id","D1", "val_i","2"));
+    assertU(adoc("id","D1", "val_i",v2));
     assertU(commit());
-    long v2 = getVer(req("q","id:D1"));
+    long D1Version2 = getVer(req("q","id:D1"));
 
-    assert(v2 > v1);
+    assert(D1Version2 > D1Version1);
 
     assertJQ(req("qt","/get", "getVersions","2")
-        ,"/versions==[" + v2 + "," + v1a + "]"
+        ,"/versions==[" + D1Version2 + "," + D2Version1 + "]"
     );
 
   }
@@ -997,11 +1083,13 @@ public class TestRecovery extends SolrTestCaseJ4 {
     UpdateLog ulog = uhandler.getUpdateLog();
 
     try {
+      String v1 = getNextVersion();
+      
       clearIndex();
       assertU(commit());
 
-      assertU(adoc("id","E1", "val_i","1"));
-      assertU(adoc("id","E2", "val_i","1"));
+      assertU(adoc("id","E1", "val_i",v1));
+      assertU(adoc("id","E2", "val_i",v1));
 
       // set to a high enough number so this test won't hang on a bug
       logReplay.release(10);
@@ -1203,13 +1291,17 @@ public class TestRecovery extends SolrTestCaseJ4 {
       // Now test that the bad log file doesn't mess up retrieving latest versions
       //
 
-      updateJ(jsonAdd(sdoc("id","F4", "_version_","104")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","F5", "_version_","105")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","F6", "_version_","106")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      String v104 = getNextVersion();
+      String v105 = getNextVersion();
+      String v106 = getNextVersion();
+      
+      updateJ(jsonAdd(sdoc("id","F4", "_version_",v104)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","F5", "_version_",v105)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","F6", "_version_",v106)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       // This currently skips the bad log file and also returns the version of the clearIndex (del *:*)
       // assertJQ(req("qt","/get", "getVersions","6"), "/versions==[106,105,104]");
-      assertJQ(req("qt","/get", "getVersions","3"), "/versions==[106,105,104]");
+      assertJQ(req("qt","/get", "getVersions","3"), "/versions==["+v106+","+v105+","+v104+"]");
 
     } finally {
       DirectUpdateHandler2.commitOnClose = true;
@@ -1259,14 +1351,16 @@ public class TestRecovery extends SolrTestCaseJ4 {
       //
       // Now test that the bad log file doesn't mess up retrieving latest versions
       //
+      String v104 = getNextVersion();
+      String v105 = getNextVersion();
+      String v106 = getNextVersion();
 
-      updateJ(jsonAdd(sdoc("id","G4", "_version_","104")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","G5", "_version_","105")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
-      updateJ(jsonAdd(sdoc("id","G6", "_version_","106")), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","G4", "_version_",v104)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","G5", "_version_",v105)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
+      updateJ(jsonAdd(sdoc("id","G6", "_version_",v106)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
 
       // This currently skips the bad log file and also returns the version of the clearIndex (del *:*)
-      // assertJQ(req("qt","/get", "getVersions","6"), "/versions==[106,105,104]");
-      assertJQ(req("qt","/get", "getVersions","3"), "/versions==[106,105,104]");
+      assertJQ(req("qt","/get", "getVersions","3"), "/versions==["+v106+","+v105+","+v104+"]");
 
       assertU(commit());
 
@@ -1554,5 +1648,13 @@ public class TestRecovery extends SolrTestCaseJ4 {
 
     return (Long)doc.get("_version_");
   }
+  
+  static class VersionProvider {
+    private static long version = 0;
+    
+    static String getNextVersion() {
+      return Long.toString(version++);
+    }
+  }
 }
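
A minimal, self-contained sketch of the pattern introduced above: the test's
private VersionProvider (not a public Solr API) hands out monotonically
increasing version strings, so expected responses can be assembled by plain
concatenation instead of hard-coded literals.

    // Toy stand-alone version of the helper; the counter starts at 0 and
    // post-increments, so the first two calls return "0" and "1".
    class VersionProviderDemo {
      private static long version = 0;

      static String getNextVersion() {
        return Long.toString(version++);
      }

      public static void main(String[] args) {
        String v104 = getNextVersion();
        String v105 = getNextVersion();
        // Assemble an expected /get response the same way the test does:
        System.out.println("=={'versions':[" + v105 + "," + v104 + "]}");
      }
    }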
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial2.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial2.java b/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial2.java
index 1fcfe9a..b909f15 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial2.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial2.java
@@ -20,6 +20,7 @@ import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.FacetParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.request.SolrQueryRequest;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -117,13 +118,13 @@ public class TestSolr4Spatial2 extends SolrTestCaseJ4 {
 
     // The tricky thing is verifying the cache works correctly...
 
-    SolrCache cache = (SolrCache) h.getCore().getInfoRegistry().get("perSegSpatialFieldCache_srptgeom");
-    assertEquals("1", cache.getStatistics().get("cumulative_inserts").toString());
-    assertEquals("0", cache.getStatistics().get("cumulative_hits").toString());
+    MetricsMap cacheMetrics = (MetricsMap) h.getCore().getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.perSegSpatialFieldCache_srptgeom");
+    assertEquals("1", cacheMetrics.getValue().get("cumulative_inserts").toString());
+    assertEquals("0", cacheMetrics.getValue().get("cumulative_hits").toString());
 
     // Repeat the query earlier
     assertJQ(sameReq, "/response/numFound==1", "/response/docs/[0]/id=='1'");
-    assertEquals("1", cache.getStatistics().get("cumulative_hits").toString());
+    assertEquals("1", cacheMetrics.getValue().get("cumulative_hits").toString());
 
     assertEquals("1 segment",
         1, getSearcher().getRawReader().leaves().size());
@@ -141,7 +142,7 @@ public class TestSolr4Spatial2 extends SolrTestCaseJ4 {
     // When there are new segments, we accumulate another hit. This tests the cache was not blown away on commit.
     // Checking equality for the first reader's cache key indicates whether the cache should still be valid.
     Object leafKey2 = getFirstLeafReaderKey();
-    assertEquals(leafKey1.equals(leafKey2) ? "2" : "1", cache.getStatistics().get("cumulative_hits").toString());
+    assertEquals(leafKey1.equals(leafKey2) ? "2" : "1", cacheMetrics.getValue().get("cumulative_hits").toString());
 
 
     // Now try to see if heatmaps work:
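
The assertions above read cache statistics from the core's metric registry
(keys of the form "CACHE.searcher.<cacheName>") instead of the retired
SolrInfoMBean/getStatistics() route. A hedged helper that packages the lookup
used repeatedly in these tests; the class and method names are hypothetical,
the registry calls are the ones visible in this diff:

    import java.util.Map;
    import org.apache.solr.core.SolrCore;
    import org.apache.solr.metrics.MetricsMap;

    final class CacheStatReader {
      // e.g. readStat(core, "filterCache", "hits")
      static long readStat(SolrCore core, String cacheName, String statName) {
        MetricsMap m = (MetricsMap) core.getCoreMetricManager().getRegistry()
            .getMetrics().get("CACHE.searcher." + cacheName);
        return ((Number) m.getValue().get(statName)).longValue();
      }
    }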

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheBean.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheBean.java b/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheBean.java
new file mode 100644
index 0000000..3ae9c47
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheBean.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search;
+
+import org.apache.lucene.util.TestUtil;
+import org.apache.solr.SolrTestCaseJ4;
+
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.lang.invoke.MethodHandles;
+import java.util.Map;
+import java.util.Random;
+
+public class TestSolrFieldCacheBean extends SolrTestCaseJ4 {
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore("solrconfig.xml","schema-minimal.xml");
+  }
+
+  @Test
+  public void testEntryList() throws Exception {
+    // ensure entries to FieldCache
+    assertU(adoc("id", "id0"));
+    assertU(commit());
+    assertQ(req("q", "*:*", "sort", "id asc"), "//*[@numFound='1']");
+
+    // Test with entry list enabled
+    assertEntryListIncluded(false);
+
+    // Test again with entry list disabled
+    System.setProperty("disableSolrFieldCacheMBeanEntryList", "true");
+    try {
+      assertEntryListNotIncluded(false);
+    } finally {
+      System.clearProperty("disableSolrFieldCacheMBeanEntryList");
+    }
+
+    // Test with entry list enabled for jmx
+    assertEntryListIncluded(true);
+
+    // Test with entry list disabled for jmx
+    System.setProperty("disableSolrFieldCacheMBeanEntryListJmx", "true");
+    try {
+      assertEntryListNotIncluded(true);
+    } finally {
+      System.clearProperty("disableSolrFieldCacheMBeanEntryListJmx");
+    }
+  }
+
+  private void assertEntryListIncluded(boolean checkJmx) {
+    SolrFieldCacheBean mbean = new SolrFieldCacheBean();
+    Random r = random();
+    String registryName = TestUtil.randomSimpleString(r, 1, 10);
+    SolrMetricManager metricManager = h.getCoreContainer().getMetricManager();
+    mbean.initializeMetrics(metricManager, registryName, null);
+    MetricsMap metricsMap = (MetricsMap)metricManager.registry(registryName).getMetrics().get("CACHE.fieldCache");
+    Map<String, Object> metrics = checkJmx ? metricsMap.getValue(true) : metricsMap.getValue();
+    assertTrue(((Number)metrics.get("entries_count")).longValue() > 0);
+    assertNotNull(metrics.get("total_size"));
+    assertNotNull(metrics.get("entry#0"));
+  }
+
+  private void assertEntryListNotIncluded(boolean checkJmx) {
+    SolrFieldCacheBean mbean = new SolrFieldCacheBean();
+    Random r = random();
+    String registryName = TestUtil.randomSimpleString(r, 1, 10);
+    SolrMetricManager metricManager = h.getCoreContainer().getMetricManager();
+    mbean.initializeMetrics(metricManager, registryName, null);
+    MetricsMap metricsMap = (MetricsMap)metricManager.registry(registryName).getMetrics().get("CACHE.fieldCache");
+    Map<String, Object> metrics = checkJmx ? metricsMap.getValue(true) : metricsMap.getValue();
+    assertTrue(((Number)metrics.get("entries_count")).longValue() > 0);
+    assertNull(metrics.get("total_size"));
+    assertNull(metrics.get("entry#0"));
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheMBean.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheMBean.java b/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheMBean.java
deleted file mode 100644
index d11c919..0000000
--- a/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheMBean.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.search;
-
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.common.util.NamedList;
-
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.lang.invoke.MethodHandles;
-
-public class TestSolrFieldCacheMBean extends SolrTestCaseJ4 {
-
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-  @BeforeClass
-  public static void beforeClass() throws Exception {
-    initCore("solrconfig.xml","schema-minimal.xml");
-  }
-
-  @Test
-  public void testEntryList() throws Exception {
-    // ensure entries to FieldCache
-    assertU(adoc("id", "id0"));
-    assertU(commit());
-    assertQ(req("q", "*:*", "sort", "id asc"), "//*[@numFound='1']");
-
-    // Test with entry list enabled
-    assertEntryListIncluded(false);
-
-    // Test again with entry list disabled
-    System.setProperty("disableSolrFieldCacheMBeanEntryList", "true");
-    try {
-      assertEntryListNotIncluded(false);
-    } finally {
-      System.clearProperty("disableSolrFieldCacheMBeanEntryList");
-    }
-
-    // Test with entry list enabled for jmx
-    assertEntryListIncluded(true);
-
-    // Test with entry list disabled for jmx
-    System.setProperty("disableSolrFieldCacheMBeanEntryListJmx", "true");
-    try {
-      assertEntryListNotIncluded(true);
-    } finally {
-      System.clearProperty("disableSolrFieldCacheMBeanEntryListJmx");
-    }
-  }
-
-  private void assertEntryListIncluded(boolean checkJmx) {
-    SolrFieldCacheMBean mbean = new SolrFieldCacheMBean();
-    NamedList stats = checkJmx ? mbean.getStatisticsForJmx() : mbean.getStatistics();
-    assert(Integer.parseInt(stats.get("entries_count").toString()) > 0);
-    assertNotNull(stats.get("total_size"));
-    assertNotNull(stats.get("entry#0"));
-  }
-
-  private void assertEntryListNotIncluded(boolean checkJmx) {
-    SolrFieldCacheMBean mbean = new SolrFieldCacheMBean();
-    NamedList stats = checkJmx ? mbean.getStatisticsForJmx() : mbean.getStatistics();
-    assert(Integer.parseInt(stats.get("entries_count").toString()) > 0);
-    assertNull(stats.get("total_size"));
-    assertNull(stats.get("entry#0"));
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java b/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
index 607f091..f454848 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
@@ -34,9 +34,9 @@ import org.apache.lucene.search.TermInSetQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.MapSolrParams;
+import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.core.SolrInfoMBean;
 import org.apache.solr.parser.QueryParser;
 import org.apache.solr.query.FilterQuery;
 import org.apache.solr.request.SolrQueryRequest;
@@ -67,6 +67,8 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
     assertU(adoc("id", "13", "eee_s", "'balance'", "rrr_s", "/leading_slash"));
 
     assertU(adoc("id", "20", "syn", "wifi ATM"));
+    
+    assertU(adoc("id", "30", "shingle23", "A B X D E"));
 
     assertU(commit());
   }
@@ -387,33 +389,33 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
     assertU(commit());  // arg... commit no longer "commits" unless there has been a change.
 
 
-    final SolrInfoMBean filterCacheStats
-        = h.getCore().getInfoRegistry().get("filterCache");
+    final MetricsMap filterCacheStats = (MetricsMap)h.getCore().getCoreMetricManager().getRegistry()
+        .getMetrics().get("CACHE.searcher.filterCache");
     assertNotNull(filterCacheStats);
-    final SolrInfoMBean queryCacheStats
-        = h.getCore().getInfoRegistry().get("queryResultCache");
+    final MetricsMap queryCacheStats = (MetricsMap)h.getCore().getCoreMetricManager().getRegistry()
+        .getMetrics().get("CACHE.searcher.queryResultCache");
 
     assertNotNull(queryCacheStats);
 
 
-    long inserts = (Long) filterCacheStats.getStatistics().get("inserts");
-    long hits = (Long) filterCacheStats.getStatistics().get("hits");
+    long inserts = (Long) filterCacheStats.getValue().get("inserts");
+    long hits = (Long) filterCacheStats.getValue().get("hits");
 
     assertJQ(req("q", "doesnotexist filter(id:1) filter(qqq_s:X) filter(abcdefg)")
         , "/response/numFound==2"
     );
 
     inserts += 3;
-    assertEquals(inserts, ((Long) filterCacheStats.getStatistics().get("inserts")).longValue());
-    assertEquals(hits, ((Long) filterCacheStats.getStatistics().get("hits")).longValue());
+    assertEquals(inserts, ((Long) filterCacheStats.getValue().get("inserts")).longValue());
+    assertEquals(hits, ((Long) filterCacheStats.getValue().get("hits")).longValue());
 
     assertJQ(req("q", "doesnotexist2 filter(id:1) filter(qqq_s:X) filter(abcdefg)")
         , "/response/numFound==2"
     );
 
     hits += 3;
-    assertEquals(inserts, ((Long) filterCacheStats.getStatistics().get("inserts")).longValue());
-    assertEquals(hits, ((Long) filterCacheStats.getStatistics().get("hits")).longValue());
+    assertEquals(inserts, ((Long) filterCacheStats.getValue().get("inserts")).longValue());
+    assertEquals(hits, ((Long) filterCacheStats.getValue().get("hits")).longValue());
 
     // make sure normal "fq" parameters also hit the cache the same way
     assertJQ(req("q", "doesnotexist3", "fq", "id:1", "fq", "qqq_s:X", "fq", "abcdefg")
@@ -421,8 +423,8 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
     );
 
     hits += 3;
-    assertEquals(inserts, ((Long) filterCacheStats.getStatistics().get("inserts")).longValue());
-    assertEquals(hits, ((Long) filterCacheStats.getStatistics().get("hits")).longValue());
+    assertEquals(inserts, ((Long) filterCacheStats.getValue().get("inserts")).longValue());
+    assertEquals(hits, ((Long) filterCacheStats.getValue().get("hits")).longValue());
 
     // try a query deeply nested in a FQ
     assertJQ(req("q", "*:* doesnotexist4", "fq", "(id:* +(filter(id:1) filter(qqq_s:X) filter(abcdefg)) )")
@@ -431,8 +433,8 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
 
     inserts += 1;  // +1 for top level fq
     hits += 3;
-    assertEquals(inserts, ((Long) filterCacheStats.getStatistics().get("inserts")).longValue());
-    assertEquals(hits, ((Long) filterCacheStats.getStatistics().get("hits")).longValue());
+    assertEquals(inserts, ((Long) filterCacheStats.getValue().get("inserts")).longValue());
+    assertEquals(hits, ((Long) filterCacheStats.getValue().get("hits")).longValue());
 
     // retry the complex FQ and make sure hashCode/equals works as expected w/ filter queries
     assertJQ(req("q", "*:* doesnotexist5", "fq", "(id:* +(filter(id:1) filter(qqq_s:X) filter(abcdefg)) )")
@@ -440,8 +442,8 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
     );
 
     hits += 1;  // top-level fq should have been found.
-    assertEquals(inserts, ((Long) filterCacheStats.getStatistics().get("inserts")).longValue());
-    assertEquals(hits, ((Long) filterCacheStats.getStatistics().get("hits")).longValue());
+    assertEquals(inserts, ((Long) filterCacheStats.getValue().get("inserts")).longValue());
+    assertEquals(hits, ((Long) filterCacheStats.getValue().get("hits")).longValue());
 
 
     // try nested filter with multiple top-level args (i.e. a boolean query)
@@ -451,8 +453,8 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
 
     hits += 1;  // the inner filter
     inserts += 1; // the outer filter
-    assertEquals(inserts, ((Long) filterCacheStats.getStatistics().get("inserts")).longValue());
-    assertEquals(hits, ((Long) filterCacheStats.getStatistics().get("hits")).longValue());
+    assertEquals(inserts, ((Long) filterCacheStats.getValue().get("inserts")).longValue());
+    assertEquals(hits, ((Long) filterCacheStats.getValue().get("hits")).longValue());
 
     // test the score for a filter, and that default score is 0
     assertJQ(req("q", "+filter(*:*) +filter(id:1)", "fl", "id,score", "sort", "id asc")
@@ -995,4 +997,20 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
       }
     }
   }
+
+  @Test
+  public void testShingleQueries() throws Exception {
+    ModifiableSolrParams sowFalseParams = new ModifiableSolrParams();
+    sowFalseParams.add("sow", "false");
+
+    try (SolrQueryRequest req = req(sowFalseParams)) {
+      QParser qParser = QParser.getParser("shingle23:(A B C)", req);
+      Query q = qParser.getQuery();
+      assertEquals("Synonym(shingle23:A_B shingle23:A_B_C) shingle23:B_C", q.toString());
+    }
+
+    assertJQ(req("df", "shingle23", "q", "A B C", "sow", "false")
+        , "/response/numFound==1"
+    );
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
index bcb5f09..1561b3e 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
@@ -387,8 +387,21 @@ public class TestJsonFacetRefinement extends SolrTestCaseHS {
             "}"
     );
 
-
-
+    // test filling in missing "allBuckets"
+    // test filling in "missing" bucket for partially refined facets
+    client.testJQ(params(p, "q", "*:*",
+        "json.facet", "{" +
+            "  cat :{type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, allBuckets:true, facet:{  xy:{type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:false}  }  }" +
+            ", cat2:{type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{  xy:{type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true }  }  }" +
+            ", cat3:{type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{  xy:{type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true , facet:{f:'sum(${num_d})'}   }  }  }" +
+            "}"
+        )
+        , "facets=={ count:8" +
+            ", cat:{ allBuckets:{count:8}, buckets:[  {val:A, count:3, xy:{buckets:[{count:2, val:X}], allBuckets:{count:3}}}]  }" +
+            ",cat2:{ allBuckets:{count:8}, buckets:[  {val:A, count:4, xy:{buckets:[{count:3, val:X}], allBuckets:{count:4}}}]  }" +
+            ",cat3:{ allBuckets:{count:8}, buckets:[  {val:A, count:4, xy:{buckets:[{count:3, val:X, f:23.0}], allBuckets:{count:4, f:4.0}}}]  }" +
+            "}"
+    );
   }
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
index 95c403a..bad3de5 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
@@ -529,6 +529,7 @@ public class TestJsonFacets extends SolrTestCaseHS {
                 " , f2:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'max(${num_d})'}  } " +
                 " , f3:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'unique(${where_s})'}  } " +
                 " , f4:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'hll(${where_s})'}  } " +
+                " , f5:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'variance(${num_d})'}  } " +
                 "}"
         )
         , "facets=={ 'count':6, " +
@@ -536,6 +537,7 @@ public class TestJsonFacets extends SolrTestCaseHS {
             ", f2:{  'buckets':[{ val:'B', count:3, x:11.0 }, { val:'A', count:2, x:4.0 }]} " +
             ", f3:{  'buckets':[{ val:'A', count:2, x:2 },    { val:'B', count:3, x:2 }]} " +
             ", f4:{  'buckets':[{ val:'A', count:2, x:2 },    { val:'B', count:3, x:2 }]} " +
+            ", f5:{  'buckets':[{ val:'B', count:3, x:74.6666666666666 },    { val:'A', count:2, x:1.0 }]} " +
             "}"
     );
 
@@ -845,19 +847,18 @@ public class TestJsonFacets extends SolrTestCaseHS {
     );
 
 
-
     // stats at top level
     client.testJQ(params(p, "q", "*:*"
             , "json.facet", "{ sum1:'sum(${num_d})', sumsq1:'sumsq(${num_d})', avg1:'avg(${num_d})', avg2:'avg(def(${num_d},0))', min1:'min(${num_d})', max1:'max(${num_d})'" +
                 ", numwhere:'unique(${where_s})', unique_num_i:'unique(${num_i})', unique_num_d:'unique(${num_d})', unique_date:'unique(${date})'" +
                 ", where_hll:'hll(${where_s})', hll_num_i:'hll(${num_i})', hll_num_d:'hll(${num_d})', hll_date:'hll(${date})'" +
-                ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)' }"
+                ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)', variance:'variance(${num_d})', stddev:'stddev(${num_d})' }"
         )
         , "facets=={ 'count':6, " +
             "sum1:3.0, sumsq1:247.0, avg1:0.6, avg2:0.5, min1:-9.0, max1:11.0" +
             ", numwhere:2, unique_num_i:4, unique_num_d:5, unique_date:5" +
             ", where_hll:2, hll_num_i:4, hll_num_d:5, hll_date:5" +
-            ", med:2.0, perc:[-9.0,2.0,11.0]  }"
+            ", med:2.0, perc:[-9.0,2.0,11.0], variance:49.04, stddev:7.002856560004639}"
     );
 
     // stats at top level, no matches
@@ -865,21 +866,20 @@ public class TestJsonFacets extends SolrTestCaseHS {
             , "json.facet", "{ sum1:'sum(${num_d})', sumsq1:'sumsq(${num_d})', avg1:'avg(${num_d})', min1:'min(${num_d})', max1:'max(${num_d})'" +
                 ", numwhere:'unique(${where_s})', unique_num_i:'unique(${num_i})', unique_num_d:'unique(${num_d})', unique_date:'unique(${date})'" +
                 ", where_hll:'hll(${where_s})', hll_num_i:'hll(${num_i})', hll_num_d:'hll(${num_d})', hll_date:'hll(${date})'" +
-                ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)' }"
+                ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)', variance:'variance(${num_d})', stddev:'stddev(${num_d})' }"
         )
         , "facets=={count:0 " +
-            "/* ,sum1:0.0, sumsq1:0.0, avg1:0.0, min1:'NaN', max1:'NaN', numwhere:0 */" +
+            "\n//  ,sum1:0.0, sumsq1:0.0, avg1:0.0, min1:'NaN', max1:'NaN', numwhere:0 \n" +
             " }"
     );
 
-
     // stats at top level, matching documents, but no values in the field
     // NOTE: this represents the current state of what is returned, not the ultimate desired state.
     client.testJQ(params(p, "q", "id:3"
         , "json.facet", "{ sum1:'sum(${num_d})', sumsq1:'sumsq(${num_d})', avg1:'avg(${num_d})', min1:'min(${num_d})', max1:'max(${num_d})'" +
             ", numwhere:'unique(${where_s})', unique_num_i:'unique(${num_i})', unique_num_d:'unique(${num_d})', unique_date:'unique(${date})'" +
             ", where_hll:'hll(${where_s})', hll_num_i:'hll(${num_i})', hll_num_d:'hll(${num_d})', hll_date:'hll(${date})'" +
-            ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)' }"
+            ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)', variance:'variance(${num_d})', stddev:'stddev(${num_d})' }"
         )
         , "facets=={count:1 " +
             ",sum1:0.0," +
@@ -894,11 +894,12 @@ public class TestJsonFacets extends SolrTestCaseHS {
             " where_hll:0," +
             " hll_num_i:0," +
             " hll_num_d:0," +
-            " hll_date:0" +
+            " hll_date:0," +
+            " variance:0.0," +
+            " stddev:0.0" +
             " }"
     );
 
-
     //
     // tests on a multi-valued field with actual multiple values, just to ensure that we are
     // using a multi-valued method for the rest of the tests when appropriate.
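
The two aggregations added above are related in the obvious way: stddev is the
square root of variance. A one-line check in plain Java (not Solr code) that
should print 7.002856560004639, the asserted value for variance 49.04:

    class StddevCheck {
      public static void main(String[] args) {
        System.out.println(Math.sqrt(49.04));
      }
    }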

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/join/BJQParserTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/join/BJQParserTest.java b/solr/core/src/test/org/apache/solr/search/join/BJQParserTest.java
index 39fa791..8c2cec3 100644
--- a/solr/core/src/test/org/apache/solr/search/join/BJQParserTest.java
+++ b/solr/core/src/test/org/apache/solr/search/join/BJQParserTest.java
@@ -19,8 +19,7 @@ package org.apache.solr.search.join;
 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.search.SolrCache;
+import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.util.BaseTestHarness;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -32,6 +31,7 @@ import java.util.Collections;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Locale;
+import java.util.Map;
 
 import javax.xml.xpath.XPathConstants;
 
@@ -276,15 +276,15 @@ public class BJQParserTest extends SolrTestCaseJ4 {
   @Test
   public void testCacheHit() throws IOException {
 
-    SolrCache parentFilterCache = (SolrCache) h.getCore().getInfoRegistry()
-        .get("perSegFilter");
+    MetricsMap parentFilterCache = (MetricsMap)h.getCore().getCoreMetricManager().getRegistry()
+        .getMetrics().get("CACHE.searcher.perSegFilter");
+    MetricsMap filterCache = (MetricsMap)h.getCore().getCoreMetricManager().getRegistry()
+        .getMetrics().get("CACHE.searcher.filterCache");
 
-    SolrCache filterCache = (SolrCache) h.getCore().getInfoRegistry()
-        .get("filterCache");
 
-    NamedList parentsBefore = parentFilterCache.getStatistics();
+    Map<String,Object> parentsBefore = parentFilterCache.getValue();
 
-    NamedList filtersBefore = filterCache.getStatistics();
+    Map<String,Object> filtersBefore = filterCache.getValue();
 
     // it should be weird enough to be unique
     String parentFilter = "parent_s:([a TO c] [d TO f])";
@@ -298,7 +298,7 @@ public class BJQParserTest extends SolrTestCaseJ4 {
         "//*[@numFound='6']");
 
     assertEquals("didn't hit fqCache yet ", 0L,
-        delta("hits", filterCache.getStatistics(), filtersBefore));
+        delta("hits", filterCache.getValue(), filtersBefore));
 
     assertQ(
         "filter by join",
@@ -306,18 +306,18 @@ public class BJQParserTest extends SolrTestCaseJ4 {
             + "\"}child_s:l"), "//*[@numFound='6']");
 
     assertEquals("in cache mode every request lookups", 3,
-        delta("lookups", parentFilterCache.getStatistics(), parentsBefore));
+        delta("lookups", parentFilterCache.getValue(), parentsBefore));
     assertEquals("last two lookups causes hits", 2,
-        delta("hits", parentFilterCache.getStatistics(), parentsBefore));
+        delta("hits", parentFilterCache.getValue(), parentsBefore));
     assertEquals("the first lookup gets insert", 1,
-        delta("inserts", parentFilterCache.getStatistics(), parentsBefore));
+        delta("inserts", parentFilterCache.getValue(), parentsBefore));
 
 
     assertEquals("true join query is cached in fqCache", 1L,
-        delta("lookups", filterCache.getStatistics(), filtersBefore));
+        delta("lookups", filterCache.getValue(), filtersBefore));
   }
   
-  private long delta(String key, NamedList a, NamedList b) {
+  private long delta(String key, Map<String,Object> a, Map<String,Object> b) {
     return (Long) a.get(key) - (Long) b.get(key);
   }
 


[14/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/schema/FieldType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/FieldType.java b/solr/core/src/java/org/apache/solr/schema/FieldType.java
index 016e166..3519283 100644
--- a/solr/core/src/java/org/apache/solr/schema/FieldType.java
+++ b/solr/core/src/java/org/apache/solr/schema/FieldType.java
@@ -453,7 +453,7 @@ public abstract class FieldType extends FieldProperties {
   }
   
   /**
-   * DocValues is not enabled for a field, but it's indexed, docvalues can be constructed 
+   * If DocValues is not enabled for a field, but it's indexed, docvalues can be constructed 
    * on the fly (uninverted, aka fieldcache) on the first request to sort, facet, etc. 
    * This specifies the structure to use.
    * 
@@ -833,7 +833,8 @@ public abstract class FieldType extends FieldProperties {
 
   private static final String POSTINGS_FORMAT = "postingsFormat";
   private static final String DOC_VALUES_FORMAT = "docValuesFormat";
-  private static final String AUTO_GENERATE_PHRASE_QUERIES = "autoGeneratePhraseQueries";
+  protected static final String AUTO_GENERATE_PHRASE_QUERIES = "autoGeneratePhraseQueries";
+  protected static final String ENABLE_GRAPH_QUERIES = "enableGraphQueries";
   private static final String ARGS = "args";
   private static final String POSITION_INCREMENT_GAP = "positionIncrementGap";
 
@@ -856,6 +857,7 @@ public abstract class FieldType extends FieldProperties {
       }
       if (this instanceof TextField) {
         namedPropertyValues.add(AUTO_GENERATE_PHRASE_QUERIES, ((TextField) this).getAutoGeneratePhraseQueries());
+        namedPropertyValues.add(ENABLE_GRAPH_QUERIES, ((TextField) this).getEnableGraphQueries());
       }
       namedPropertyValues.add(getPropertyName(INDEXED), hasProperty(INDEXED));
       namedPropertyValues.add(getPropertyName(STORED), hasProperty(STORED));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/schema/FloatPointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/FloatPointField.java b/solr/core/src/java/org/apache/solr/schema/FloatPointField.java
index 0f42cfd..fe9c753 100644
--- a/solr/core/src/java/org/apache/solr/schema/FloatPointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/FloatPointField.java
@@ -106,6 +106,9 @@ public class FloatPointField extends PointField implements FloatValueFieldType {
   @Override
   public Query getSetQuery(QParser parser, SchemaField field, Collection<String> externalVal) {
     assert externalVal.size() > 0;
+    if (!field.indexed()) {
+      return super.getSetQuery(parser, field, externalVal);
+    }
     float[] values = new float[externalVal.size()];
     int i = 0;
     for (String val:externalVal) {
@@ -148,7 +151,7 @@ public class FloatPointField extends PointField implements FloatValueFieldType {
   @Override
   public Type getUninversionType(SchemaField sf) {
     if (sf.multiValued()) {
-      return Type.SORTED_FLOAT;
+      return null;
     } else {
       return Type.FLOAT_POINT;
     }
@@ -167,8 +170,6 @@ public class FloatPointField extends PointField implements FloatValueFieldType {
 
   @Override
   public IndexableField createField(SchemaField field, Object value) {
-    if (!isFieldUsed(field)) return null;
-
     float floatValue = (value instanceof Number) ? ((Number) value).floatValue() : Float.parseFloat(value.toString());
     return new FloatPoint(field.getName(), floatValue);
   }
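
The guard added to getSetQuery here (and in IntPointField/LongPointField
below) routes docValues-only fields to the generic superclass implementation,
since a points set query requires index structures. A sketch of the full
method shape; only the indexed() guard comes from this diff, and the lines
after it are a plausible completion, not quoted from the class:

    @Override
    public Query getSetQuery(QParser parser, SchemaField field, Collection<String> externalVal) {
      if (!field.indexed()) {
        return super.getSetQuery(parser, field, externalVal); // docValues / generic path
      }
      float[] values = new float[externalVal.size()];
      int i = 0;
      for (String val : externalVal) {
        values[i++] = Float.parseFloat(val);
      }
      return FloatPoint.newSetQuery(field.getName(), values);
    }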

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
index 3de59ee..2ea63d4 100644
--- a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
+++ b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
@@ -46,7 +46,6 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.MultiFields;
@@ -375,12 +374,14 @@ public class IndexSchema {
   }
   
   public Map<String,UninvertingReader.Type> getUninversionMap(IndexReader reader) {
-    Map<String,UninvertingReader.Type> map = new HashMap<>();
+    final Map<String,UninvertingReader.Type> map = new HashMap<>();
     for (FieldInfo f : MultiFields.getMergedFieldInfos(reader)) {
-      if (f.getDocValuesType() == DocValuesType.NONE && f.getIndexOptions() != IndexOptions.NONE) {
-        SchemaField sf = getFieldOrNull(f.name);
-        if (sf != null) {
-          UninvertingReader.Type type = sf.getType().getUninversionType(sf);
+      if (f.getDocValuesType() == DocValuesType.NONE) {
+        // we have a field (of some kind) in the reader w/o DocValues
+        // if we have an equivalent indexed=true field in the schema, trust its uninversion type (if any)
+        final SchemaField sf = getFieldOrNull(f.name);
+        if (sf != null && sf.indexed()) {
+          final UninvertingReader.Type type = sf.getType().getUninversionType(sf);
           if (type != null) {
             map.put(f.name, type);
           }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/schema/IntPointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/IntPointField.java b/solr/core/src/java/org/apache/solr/schema/IntPointField.java
index 5eaf7e0..f47f450 100644
--- a/solr/core/src/java/org/apache/solr/schema/IntPointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/IntPointField.java
@@ -103,6 +103,9 @@ public class IntPointField extends PointField implements IntValueFieldType {
   @Override
   public Query getSetQuery(QParser parser, SchemaField field, Collection<String> externalVal) {
     assert externalVal.size() > 0;
+    if (!field.indexed()) {
+      return super.getSetQuery(parser, field, externalVal);
+    }
     int[] values = new int[externalVal.size()];
     int i = 0;
     for (String val:externalVal) {
@@ -145,7 +148,7 @@ public class IntPointField extends PointField implements IntValueFieldType {
   @Override
   public Type getUninversionType(SchemaField sf) {
     if (sf.multiValued()) {
-      return Type.SORTED_INTEGER;
+      return null; 
     } else {
       return Type.INTEGER_POINT;
     }
@@ -159,8 +162,6 @@ public class IntPointField extends PointField implements IntValueFieldType {
 
   @Override
   public IndexableField createField(SchemaField field, Object value) {
-    if (!isFieldUsed(field)) return null;
-
     int intValue = (value instanceof Number) ? ((Number) value).intValue() : Integer.parseInt(value.toString());
     return new IntPoint(field.getName(), intValue);
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/schema/LongPointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/LongPointField.java b/solr/core/src/java/org/apache/solr/schema/LongPointField.java
index e58fbcf..bef6c47 100644
--- a/solr/core/src/java/org/apache/solr/schema/LongPointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/LongPointField.java
@@ -102,6 +102,9 @@ public class LongPointField extends PointField implements LongValueFieldType {
   @Override
   public Query getSetQuery(QParser parser, SchemaField field, Collection<String> externalVal) {
     assert externalVal.size() > 0;
+    if (!field.indexed()) {
+      return super.getSetQuery(parser, field, externalVal);
+    }
     long[] values = new long[externalVal.size()];
     int i = 0;
     for (String val:externalVal) {
@@ -144,7 +147,7 @@ public class LongPointField extends PointField implements LongValueFieldType {
   @Override
   public Type getUninversionType(SchemaField sf) {
     if (sf.multiValued()) {
-      return Type.SORTED_LONG;
+      return null;
     } else {
       return Type.LONG_POINT;
     }
@@ -164,8 +167,6 @@ public class LongPointField extends PointField implements LongValueFieldType {
 
   @Override
   public IndexableField createField(SchemaField field, Object value) {
-    if (!isFieldUsed(field)) return null;
-
     long longValue = (value instanceof Number) ? ((Number) value).longValue() : Long.parseLong(value.toString());
     return new LongPoint(field.getName(), longValue);
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/schema/PointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/PointField.java b/solr/core/src/java/org/apache/solr/schema/PointField.java
index 91d3eff..cad3c7e 100644
--- a/solr/core/src/java/org/apache/solr/schema/PointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/PointField.java
@@ -21,6 +21,7 @@ import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Date;
 import java.util.List;
 
 import org.apache.lucene.document.NumericDocValuesField;
@@ -111,7 +112,9 @@ public abstract class PointField extends NumericFieldType {
   }
 
   @Override
-  public abstract Query getSetQuery(QParser parser, SchemaField field, Collection<String> externalVals);
+  public Query getSetQuery(QParser parser, SchemaField field, Collection<String> externalVals) {
+    return super.getSetQuery(parser, field, externalVals);
+  }
 
   @Override
   public Query getFieldQuery(QParser parser, SchemaField field, String externalVal) {
@@ -204,34 +207,48 @@ public abstract class PointField extends NumericFieldType {
 
   @Override
   public List<IndexableField> createFields(SchemaField sf, Object value) {
-    if (!(sf.hasDocValues() || sf.stored())) {
-      return Collections.singletonList(createField(sf, value));
+    if (!isFieldUsed(sf)) {
+      return Collections.emptyList();
+    }
+    List<IndexableField> fields = new ArrayList<>(3);
+    IndexableField field = null;
+    if (sf.indexed()) {
+      field = createField(sf, value);
+      fields.add(field);
     }
-    List<IndexableField> fields = new ArrayList<>();
-    final IndexableField field = createField(sf, value);
-    fields.add(field);
     
     if (sf.hasDocValues()) {
+      final Number numericValue;
+      if (field == null) {
+        final Object nativeTypeObject = toNativeType(value);
+        if (getNumberType() == NumberType.DATE) {
+          numericValue = ((Date)nativeTypeObject).getTime();
+        } else {
+          numericValue = (Number) nativeTypeObject;
+        }
+      } else {
+        numericValue = field.numericValue();
+      }
       final long bits;
       if (!sf.multiValued()) {
-        if (field.numericValue() instanceof Integer || field.numericValue() instanceof Long) {
-          bits = field.numericValue().longValue();
-        } else if (field.numericValue() instanceof Float) {
-          bits = Float.floatToIntBits(field.numericValue().floatValue());
+        if (numericValue instanceof Integer || numericValue instanceof Long) {
+          bits = numericValue.longValue();
+        } else if (numericValue instanceof Float) {
+          bits = Float.floatToIntBits(numericValue.floatValue());
         } else {
-          assert field.numericValue() instanceof Double;
-          bits = Double.doubleToLongBits(field.numericValue().doubleValue());
+          assert numericValue instanceof Double;
+          bits = Double.doubleToLongBits(numericValue.doubleValue());
         }
         fields.add(new NumericDocValuesField(sf.getName(), bits));
       } else {
         // MultiValued
-        if (field.numericValue() instanceof Integer || field.numericValue() instanceof Long) {
-          bits = field.numericValue().longValue();
-        } else if (field.numericValue() instanceof Float) {
-          bits = NumericUtils.floatToSortableInt(field.numericValue().floatValue());
+        if (numericValue instanceof Integer || numericValue instanceof Long) {
+          bits = numericValue.longValue();
+        } else if (numericValue instanceof Float) {
+          bits = NumericUtils.floatToSortableInt(numericValue.floatValue());
         } else {
-          assert field.numericValue() instanceof Double;
-          bits = NumericUtils.doubleToSortableLong(field.numericValue().doubleValue());
+          assert numericValue instanceof Double;
+          bits = NumericUtils.doubleToSortableLong(numericValue.doubleValue());
         }
         fields.add(new SortedNumericDocValuesField(sf.getName(), bits));
       }
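
The single- vs multi-valued branches above encode the same number differently:
NumericDocValues stores the raw IEEE bits, while SortedNumericDocValues stores
"sortable" bits so numeric order matches the order of the encoded integers. A
standalone illustration using only the Lucene utility involved:

    import org.apache.lucene.util.NumericUtils;

    final class DocValuesBitsDemo {
      public static void main(String[] args) {
        float f = -1.5f;
        long raw = Float.floatToIntBits(f);                 // single-valued encoding
        long sortable = NumericUtils.floatToSortableInt(f); // multi-valued encoding
        System.out.println(raw + " vs " + sortable);
      }
    }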

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/schema/SchemaField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/SchemaField.java b/solr/core/src/java/org/apache/solr/schema/SchemaField.java
index 1e18ee0..c2e8cca 100644
--- a/solr/core/src/java/org/apache/solr/schema/SchemaField.java
+++ b/solr/core/src/java/org/apache/solr/schema/SchemaField.java
@@ -161,20 +161,17 @@ public final class SchemaField extends FieldProperties implements IndexableField
    * @see FieldType#getSortField
    */
   public void checkSortability() throws SolrException {
-    if (! (indexed() || hasDocValues()) ) {
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, 
-                              "can not sort on a field which is neither indexed nor has doc values: " 
-                              + getName());
-    }
     if ( multiValued() ) {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, 
                               "can not sort on multivalued field: " 
                               + getName());
     }
-    if (this.type.isPointField() && !hasDocValues()) {
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, 
-                              "can not sort on a PointField without doc values: " 
-                              + getName());
+    if (! hasDocValues() ) {
+      if ( ! ( indexed() && null != this.type.getUninversionType(this) ) ) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, 
+                                "can not sort on a field w/o docValues unless it is indexed and supports Uninversion: " 
+                                + getName());
+      }
     }
   }
 
@@ -187,22 +184,18 @@ public final class SchemaField extends FieldProperties implements IndexableField
    * @see FieldType#getValueSource
    */
   public void checkFieldCacheSource() throws SolrException {
-    if (! (indexed() || hasDocValues()) ) {
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, 
-                              "can not use FieldCache on a field which is neither indexed nor has doc values: " 
-                              + getName());
-    }
     if ( multiValued() ) {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, 
                               "can not use FieldCache on multivalued field: " 
                               + getName());
     }
-    if (this.type.isPointField() && !hasDocValues()) {
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, 
-                              "Point fields can't use FieldCache. Use docValues=true for field: " 
-                              + getName());
+    if (! hasDocValues() ) {
+      if ( ! ( indexed() && null != this.type.getUninversionType(this) ) ) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, 
+                                "can not use FieldCache on a field w/o docValues unless it is indexed and supports Uninversion: " 
+                                + getName());
+      }
     }
-    
   }
 
   static SchemaField create(String name, FieldType ft, Map<String,?> props) {
@@ -343,6 +336,7 @@ public final class SchemaField extends FieldProperties implements IndexableField
       properties.add(getPropertyName(OMIT_POSITIONS), omitPositions());
       properties.add(getPropertyName(STORE_OFFSETS), storeOffsetsWithPositions());
       properties.add(getPropertyName(MULTIVALUED), multiValued());
+      properties.add(getPropertyName(LARGE_FIELD), isLarge());
       if (sortMissingFirst()) {
         properties.add(getPropertyName(SORT_MISSING_FIRST), sortMissingFirst());
       } else if (sortMissingLast()) {
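
checkSortability() and checkFieldCacheSource() now enforce one shared rule.
Restated as a standalone predicate (an illustrative paraphrase of the code
above, not a Solr API):

    import org.apache.solr.schema.SchemaField;

    // Usable for sorting/FieldCache: single-valued, and either backed by
    // docValues or indexed with a supported uninversion type.
    static boolean sortableOrUninvertible(SchemaField f) {
      return !f.multiValued()
          && (f.hasDocValues()
              || (f.indexed() && f.getType().getUninversionType(f) != null));
    }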

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/schema/SchemaManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java
index 7092c09..1658e67 100644
--- a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java
+++ b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java
@@ -127,7 +127,7 @@ public class SchemaManager {
             latestVersion = ZkController.persistConfigResourceToZooKeeper
                 (zkLoader, managedIndexSchema.getSchemaZkVersion(), managedIndexSchema.getResourceName(),
                  sw.toString().getBytes(StandardCharsets.UTF_8), true);
-            req.getCore().getCoreDescriptor().getCoreContainer().reload(req.getCore().getName());
+            req.getCore().getCoreContainer().reload(req.getCore().getName());
             break;
           } catch (ZkController.ResourceModifiedInZkException e) {
             log.info("Schema was modified by another node. Retrying..");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/schema/TextField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/TextField.java b/solr/core/src/java/org/apache/solr/schema/TextField.java
index 3d34df3..d8bae24 100644
--- a/solr/core/src/java/org/apache/solr/schema/TextField.java
+++ b/solr/core/src/java/org/apache/solr/schema/TextField.java
@@ -41,6 +41,7 @@ import org.apache.solr.uninverting.UninvertingReader.Type;
  */
 public class TextField extends FieldType {
   protected boolean autoGeneratePhraseQueries;
+  protected boolean enableGraphQueries;
 
   /**
    * Analyzer set by schema for text types to use when searching fields
@@ -69,9 +70,15 @@ public class TextField extends FieldType {
     } else {
       autoGeneratePhraseQueries = true;
     }
-    String autoGeneratePhraseQueriesStr = args.remove("autoGeneratePhraseQueries");
+    String autoGeneratePhraseQueriesStr = args.remove(AUTO_GENERATE_PHRASE_QUERIES);
     if (autoGeneratePhraseQueriesStr != null)
       autoGeneratePhraseQueries = Boolean.parseBoolean(autoGeneratePhraseQueriesStr);
+    
+    enableGraphQueries = true;
+    String enableGraphQueriesStr = args.remove(ENABLE_GRAPH_QUERIES);
+    if (enableGraphQueriesStr != null)
+      enableGraphQueries = Boolean.parseBoolean(enableGraphQueriesStr);
+
     super.init(schema, args);    
   }
 
@@ -93,6 +100,10 @@ public class TextField extends FieldType {
   public boolean getAutoGeneratePhraseQueries() {
     return autoGeneratePhraseQueries;
   }
+  
+  public boolean getEnableGraphQueries() {
+    return enableGraphQueries;
+  }
 
   @Override
   public SortField getSortField(SchemaField field, boolean reverse) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java b/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java
index ee65fe8..e719404 100644
--- a/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java
+++ b/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java
@@ -55,7 +55,7 @@ public class ZkIndexSchemaReader implements OnReconnect {
     solrCore.addCloseHook(new CloseHook() {
       @Override
       public void preClose(SolrCore core) {
-        CoreContainer cc = core.getCoreDescriptor().getCoreContainer();
+        CoreContainer cc = core.getCoreContainer();
         if (cc.isZooKeeperAware()) {
           log.debug("Removing ZkIndexSchemaReader OnReconnect listener as core "+core.getName()+" is shutting down.");
           ZkIndexSchemaReader.this.isRemoved = true;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
index 9825f72..07d7e51 100644
--- a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
@@ -1080,7 +1080,8 @@ public class ExtendedDismaxQParser extends QParser {
     
     @Override
     protected Query newFieldQuery(Analyzer analyzer, String field, String queryText, 
-                                  boolean quoted, boolean fieldAutoGenPhraseQueries) throws SyntaxError {
+                                  boolean quoted, boolean fieldAutoGenPhraseQueries, boolean enableGraphQueries)
+        throws SyntaxError {
       Analyzer actualAnalyzer;
       if (removeStopFilter) {
         if (nonStopFilterAnalyzerPerField == null) {
@@ -1093,7 +1094,7 @@ public class ExtendedDismaxQParser extends QParser {
       } else {
         actualAnalyzer = parser.getReq().getSchema().getFieldType(field).getQueryAnalyzer();
       }
-      return super.newFieldQuery(actualAnalyzer, field, queryText, quoted, fieldAutoGenPhraseQueries);
+      return super.newFieldQuery(actualAnalyzer, field, queryText, quoted, fieldAutoGenPhraseQueries, enableGraphQueries);
     }
     
     @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/FastLRUCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/FastLRUCache.java b/solr/core/src/java/org/apache/solr/search/FastLRUCache.java
index 9c4b892..cb699b2 100644
--- a/solr/core/src/java/org/apache/solr/search/FastLRUCache.java
+++ b/solr/core/src/java/org/apache/solr/search/FastLRUCache.java
@@ -15,15 +15,17 @@
  * limitations under the License.
  */
 package org.apache.solr.search;
+
+import com.codahale.metrics.MetricRegistry;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.util.ConcurrentLRUCache;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
 
-import java.io.Serializable;
 import java.lang.invoke.MethodHandles;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -57,6 +59,10 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>
 
   private long maxRamBytes;
 
+  private MetricsMap cacheMap;
+  private Set<String> metricNames = new HashSet<>();
+  private MetricRegistry registry;
+
   @Override
   public Object init(Map args, Object persistence, CacheRegenerator regenerator) {
     super.init(args, regenerator);
@@ -215,68 +221,80 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>
   }
 
   @Override
-  public String getSource() {
-    return null;
+  public Set<String> getMetricNames() {
+    return metricNames;
   }
 
-
   @Override
-  public NamedList getStatistics() {
-    NamedList<Serializable> lst = new SimpleOrderedMap<>();
-    if (cache == null)  return lst;
-    ConcurrentLRUCache.Stats stats = cache.getStats();
-    long lookups = stats.getCumulativeLookups();
-    long hits = stats.getCumulativeHits();
-    long inserts = stats.getCumulativePuts();
-    long evictions = stats.getCumulativeEvictions();
-    long size = stats.getCurrentSize();
-    long clookups = 0;
-    long chits = 0;
-    long cinserts = 0;
-    long cevictions = 0;
-
-    // NOTE: It is safe to iterate on a CopyOnWriteArrayList
-    for (ConcurrentLRUCache.Stats statistiscs : statsList) {
-      clookups += statistiscs.getCumulativeLookups();
-      chits += statistiscs.getCumulativeHits();
-      cinserts += statistiscs.getCumulativePuts();
-      cevictions += statistiscs.getCumulativeEvictions();
-    }
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    registry = manager.registry(registryName);
+    cacheMap = new MetricsMap((detailed, map) -> {
+      if (cache != null) {
+        ConcurrentLRUCache.Stats stats = cache.getStats();
+        long lookups = stats.getCumulativeLookups();
+        long hits = stats.getCumulativeHits();
+        long inserts = stats.getCumulativePuts();
+        long evictions = stats.getCumulativeEvictions();
+        long size = stats.getCurrentSize();
+        long clookups = 0;
+        long chits = 0;
+        long cinserts = 0;
+        long cevictions = 0;
+
+        // NOTE: It is safe to iterate on a CopyOnWriteArrayList
+        for (ConcurrentLRUCache.Stats statistics : statsList) {
+          clookups += statistics.getCumulativeLookups();
+          chits += statistics.getCumulativeHits();
+          cinserts += statistics.getCumulativePuts();
+          cevictions += statistics.getCumulativeEvictions();
+        }
+
+        map.put("lookups", lookups);
+        map.put("hits", hits);
+        map.put("hitratio", calcHitRatio(lookups, hits));
+        map.put("inserts", inserts);
+        map.put("evictions", evictions);
+        map.put("size", size);
+
+        map.put("warmupTime", warmupTime);
+        map.put("cumulative_lookups", clookups);
+        map.put("cumulative_hits", chits);
+        map.put("cumulative_hitratio", calcHitRatio(clookups, chits));
+        map.put("cumulative_inserts", cinserts);
+        map.put("cumulative_evictions", cevictions);
+
+        if (detailed && showItems != 0) {
+          Map items = cache.getLatestAccessedItems( showItems == -1 ? Integer.MAX_VALUE : showItems );
+          for (Map.Entry e : (Set <Map.Entry>)items.entrySet()) {
+            Object k = e.getKey();
+            Object v = e.getValue();
+
+            String ks = "item_" + k;
+            String vs = v.toString();
+            map.put(ks,vs);
+          }
 
-    lst.add("lookups", lookups);
-    lst.add("hits", hits);
-    lst.add("hitratio", calcHitRatio(lookups, hits));
-    lst.add("inserts", inserts);
-    lst.add("evictions", evictions);
-    lst.add("size", size);
-
-    lst.add("warmupTime", warmupTime);
-    lst.add("cumulative_lookups", clookups);
-    lst.add("cumulative_hits", chits);
-    lst.add("cumulative_hitratio", calcHitRatio(clookups, chits));
-    lst.add("cumulative_inserts", cinserts);
-    lst.add("cumulative_evictions", cevictions);
-
-    if (showItems != 0) {
-      Map items = cache.getLatestAccessedItems( showItems == -1 ? Integer.MAX_VALUE : showItems );
-      for (Map.Entry e : (Set <Map.Entry>)items.entrySet()) {
-        Object k = e.getKey();
-        Object v = e.getValue();
-
-        String ks = "item_" + k;
-        String vs = v.toString();
-        lst.add(ks,vs);
+        }
       }
-      
-    }
+    });
+    manager.registerGauge(this, registryName, cacheMap, true, scope, getCategory().toString());
+  }
+
+  // for unit tests only
+  MetricsMap getMetricsMap() {
+    return cacheMap;
+  }
 
-    return lst;
+  @Override
+  public MetricRegistry getMetricRegistry() {
+    return registry;
   }
 
   @Override
   public String toString() {
-    return name() + getStatistics().toString();
+    return name() + (cacheMap != null ? cacheMap.getValue().toString() : "");
   }
+
 }
 
 

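The getStatistics() removal above is part of this commit's wider move from SolrInfoMBean to
SolrInfoBean plus the metrics API: cache statistics are now published lazily through a MetricsMap
gauge. A condensed sketch of the pattern, which repeats below in LFUCache and LRUCache (the counter
names are placeholders; only the MetricsMap constructor and registerGauge call mirror the code above):

    MetricsMap cacheMap = new MetricsMap((detailed, map) -> {
      map.put("lookups", lookups);   // cheap counters are always reported
      map.put("hits", hits);
      if (detailed) {
        // expensive per-entry dumps are gated behind the 'detailed' flag
      }
    });
    manager.registerGauge(this, registryName, cacheMap, true, scope, getCategory().toString());

Because the lambda runs on every metrics snapshot, values are read at request time instead of being
copied eagerly into a NamedList as before.
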
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/Grouping.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/Grouping.java b/solr/core/src/java/org/apache/solr/search/Grouping.java
index 302383a..eeb6b66 100644
--- a/solr/core/src/java/org/apache/solr/search/Grouping.java
+++ b/solr/core/src/java/org/apache/solr/search/Grouping.java
@@ -47,17 +47,14 @@ import org.apache.lucene.search.TopFieldCollector;
 import org.apache.lucene.search.TopScoreDocCollector;
 import org.apache.lucene.search.TotalHitCountCollector;
 import org.apache.lucene.search.grouping.AllGroupHeadsCollector;
+import org.apache.lucene.search.grouping.AllGroupsCollector;
+import org.apache.lucene.search.grouping.FirstPassGroupingCollector;
 import org.apache.lucene.search.grouping.GroupDocs;
 import org.apache.lucene.search.grouping.SearchGroup;
+import org.apache.lucene.search.grouping.TermGroupSelector;
 import org.apache.lucene.search.grouping.TopGroups;
-import org.apache.lucene.search.grouping.function.FunctionAllGroupHeadsCollector;
-import org.apache.lucene.search.grouping.function.FunctionAllGroupsCollector;
-import org.apache.lucene.search.grouping.function.FunctionFirstPassGroupingCollector;
-import org.apache.lucene.search.grouping.function.FunctionSecondPassGroupingCollector;
-import org.apache.lucene.search.grouping.term.TermAllGroupHeadsCollector;
-import org.apache.lucene.search.grouping.term.TermAllGroupsCollector;
-import org.apache.lucene.search.grouping.term.TermFirstPassGroupingCollector;
-import org.apache.lucene.search.grouping.term.TermSecondPassGroupingCollector;
+import org.apache.lucene.search.grouping.TopGroupsCollector;
+import org.apache.lucene.search.grouping.ValueSourceGroupSelector;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.mutable.MutableValue;
 import org.apache.solr.common.SolrException;
@@ -700,10 +697,10 @@ public class Grouping {
   public class CommandField extends Command<BytesRef> {
 
     public String groupBy;
-    TermFirstPassGroupingCollector firstPass;
-    TermSecondPassGroupingCollector secondPass;
+    FirstPassGroupingCollector<BytesRef> firstPass;
+    TopGroupsCollector<BytesRef> secondPass;
 
-    TermAllGroupsCollector allGroupsCollector;
+    AllGroupsCollector<BytesRef> allGroupsCollector;
 
     // If offset falls outside the number of documents a group can provide use this collector instead of secondPass
     TotalHitCountCollector fallBackCollector;
@@ -729,7 +726,7 @@ public class Grouping {
       }
 
       groupSort = groupSort == null ? Sort.RELEVANCE : groupSort;
-      firstPass = new TermFirstPassGroupingCollector(groupBy, groupSort, actualGroupsToFind);
+      firstPass = new FirstPassGroupingCollector<>(new TermGroupSelector(groupBy), groupSort, actualGroupsToFind);
       return firstPass;
     }
 
@@ -739,14 +736,14 @@ public class Grouping {
     @Override
     protected Collector createSecondPassCollector() throws IOException {
       if (actualGroupsToFind <= 0) {
-        allGroupsCollector = new TermAllGroupsCollector(groupBy);
+        allGroupsCollector = new AllGroupsCollector<>(new TermGroupSelector(groupBy));
         return totalCount == TotalCount.grouped ? allGroupsCollector : null;
       }
 
       topGroups = format == Format.grouped ? firstPass.getTopGroups(offset, false) : firstPass.getTopGroups(0, false);
       if (topGroups == null) {
         if (totalCount == TotalCount.grouped) {
-          allGroupsCollector = new TermAllGroupsCollector(groupBy);
+          allGroupsCollector = new AllGroupsCollector<>(new TermGroupSelector(groupBy));
           fallBackCollector = new TotalHitCountCollector();
           return MultiCollector.wrap(allGroupsCollector, fallBackCollector);
         } else {
@@ -758,12 +755,12 @@ public class Grouping {
       int groupedDocsToCollect = getMax(groupOffset, docsPerGroup, maxDoc);
       groupedDocsToCollect = Math.max(groupedDocsToCollect, 1);
       Sort withinGroupSort = this.withinGroupSort != null ? this.withinGroupSort : Sort.RELEVANCE;
-      secondPass = new TermSecondPassGroupingCollector(
-          groupBy, topGroups, groupSort, withinGroupSort, groupedDocsToCollect, needScores, needScores, false
+      secondPass = new TopGroupsCollector<>(new TermGroupSelector(groupBy),
+          topGroups, groupSort, withinGroupSort, groupedDocsToCollect, needScores, needScores, false
       );
 
       if (totalCount == TotalCount.grouped) {
-        allGroupsCollector = new TermAllGroupsCollector(groupBy);
+        allGroupsCollector = new AllGroupsCollector<>(new TermGroupSelector(groupBy));
         return MultiCollector.wrap(secondPass, allGroupsCollector);
       } else {
         return secondPass;
@@ -776,7 +773,7 @@ public class Grouping {
     @Override
     public AllGroupHeadsCollector<?> createAllGroupCollector() throws IOException {
       Sort sortWithinGroup = withinGroupSort != null ? withinGroupSort : Sort.RELEVANCE;
-      return TermAllGroupHeadsCollector.create(groupBy, sortWithinGroup);
+      return AllGroupHeadsCollector.newCollector(new TermGroupSelector(groupBy), sortWithinGroup);
     }
 
     /**
@@ -921,11 +918,15 @@ public class Grouping {
     public ValueSource groupBy;
     Map context;
 
-    FunctionFirstPassGroupingCollector firstPass;
-    FunctionSecondPassGroupingCollector secondPass;
+    private ValueSourceGroupSelector newSelector() {
+      return new ValueSourceGroupSelector(groupBy, context);
+    }
+
+    FirstPassGroupingCollector<MutableValue> firstPass;
+    TopGroupsCollector<MutableValue> secondPass;
     // If offset falls outside the number of documents a group can provide use this collector instead of secondPass
     TotalHitCountCollector fallBackCollector;
-    FunctionAllGroupsCollector allGroupsCollector;
+    AllGroupsCollector<MutableValue> allGroupsCollector;
     Collection<SearchGroup<MutableValue>> topGroups;
 
     /**
@@ -950,7 +951,7 @@ public class Grouping {
       }
 
       groupSort = groupSort == null ? Sort.RELEVANCE : groupSort;
-      firstPass = new FunctionFirstPassGroupingCollector(groupBy, context, searcher.weightSort(groupSort), actualGroupsToFind);
+      firstPass = new FirstPassGroupingCollector<>(newSelector(), searcher.weightSort(groupSort), actualGroupsToFind);
       return firstPass;
     }
 
@@ -960,14 +961,14 @@ public class Grouping {
     @Override
     protected Collector createSecondPassCollector() throws IOException {
       if (actualGroupsToFind <= 0) {
-        allGroupsCollector = new FunctionAllGroupsCollector(groupBy, context);
+        allGroupsCollector = new AllGroupsCollector<>(newSelector());
         return totalCount == TotalCount.grouped ? allGroupsCollector : null;
       }
 
       topGroups = format == Format.grouped ? firstPass.getTopGroups(offset, false) : firstPass.getTopGroups(0, false);
       if (topGroups == null) {
         if (totalCount == TotalCount.grouped) {
-          allGroupsCollector = new FunctionAllGroupsCollector(groupBy, context);
+          allGroupsCollector = new AllGroupsCollector<>(newSelector());
           fallBackCollector = new TotalHitCountCollector();
           return MultiCollector.wrap(allGroupsCollector, fallBackCollector);
         } else {
@@ -979,12 +980,12 @@ public class Grouping {
       int groupdDocsToCollect = getMax(groupOffset, docsPerGroup, maxDoc);
       groupdDocsToCollect = Math.max(groupdDocsToCollect, 1);
       Sort withinGroupSort = this.withinGroupSort != null ? this.withinGroupSort : Sort.RELEVANCE;
-      secondPass = new FunctionSecondPassGroupingCollector(
-          topGroups, groupSort, withinGroupSort, groupdDocsToCollect, needScores, needScores, false, groupBy, context
+      secondPass = new TopGroupsCollector<>(newSelector(),
+          topGroups, groupSort, withinGroupSort, groupdDocsToCollect, needScores, needScores, false
       );
 
       if (totalCount == TotalCount.grouped) {
-        allGroupsCollector = new FunctionAllGroupsCollector(groupBy, context);
+        allGroupsCollector = new AllGroupsCollector<>(newSelector());
         return MultiCollector.wrap(secondPass, allGroupsCollector);
       } else {
         return secondPass;
@@ -994,7 +995,7 @@ public class Grouping {
     @Override
     public AllGroupHeadsCollector<?> createAllGroupCollector() throws IOException {
       Sort sortWithinGroup = withinGroupSort != null ? withinGroupSort : Sort.RELEVANCE;
-      return new FunctionAllGroupHeadsCollector(groupBy, context, sortWithinGroup);
+      return AllGroupHeadsCollector.newCollector(newSelector(), sortWithinGroup);
     }
 
     /**

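With the Term*/Function* collector hierarchy gone, field and function grouping share generic
collectors parameterized by a GroupSelector. A hedged sketch of the resulting two-pass flow against
a hypothetical "category" field (sorts and counts are arbitrary):

    FirstPassGroupingCollector<BytesRef> first =
        new FirstPassGroupingCollector<>(new TermGroupSelector("category"), Sort.RELEVANCE, 10);
    searcher.search(query, first);
    Collection<SearchGroup<BytesRef>> top = first.getTopGroups(0, false);

    TopGroupsCollector<BytesRef> second = new TopGroupsCollector<>(
        new TermGroupSelector("category"), top, Sort.RELEVANCE, Sort.RELEVANCE,
        5 /* docs per group */, true, true, false);
    searcher.search(query, second);
    TopGroups<BytesRef> groups = second.getTopGroups(0);

Swapping in a ValueSourceGroupSelector (as CommandFunc does via newSelector()) is the only change
needed to group by a function instead of a field.
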
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
index bf64060..fca9a34 100644
--- a/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
@@ -84,7 +84,7 @@ public class JoinQParserPlugin extends QParserPlugin {
         long fromCoreOpenTime = 0;
 
         if (fromIndex != null && !fromIndex.equals(req.getCore().getCoreDescriptor().getName()) ) {
-          CoreContainer container = req.getCore().getCoreDescriptor().getCoreContainer();
+          CoreContainer container = req.getCore().getCoreContainer();
 
           // if in SolrCloud mode, fromIndex should be the name of a single-sharded collection
           coreName = ScoreJoinQParserPlugin.getCoreName(fromIndex, container);
@@ -173,7 +173,7 @@ class JoinQuery extends Query {
           throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Cross-core join must have SolrRequestInfo");
         }
 
-        CoreContainer container = searcher.getCore().getCoreDescriptor().getCoreContainer();
+        CoreContainer container = searcher.getCore().getCoreContainer();
         final SolrCore fromCore = container.getCore(fromIndex);
 
         if (fromCore == null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/LFUCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/LFUCache.java b/solr/core/src/java/org/apache/solr/search/LFUCache.java
index 2b593c6..82ba6d2 100644
--- a/solr/core/src/java/org/apache/solr/search/LFUCache.java
+++ b/solr/core/src/java/org/apache/solr/search/LFUCache.java
@@ -15,19 +15,19 @@
  * limitations under the License.
  */
 package org.apache.solr.search;
-import java.io.Serializable;
+
 import java.lang.invoke.MethodHandles;
-import java.net.URL;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.TimeUnit;
 
+import com.codahale.metrics.MetricRegistry;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.SolrCore;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.util.ConcurrentLFUCache;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -64,6 +64,9 @@ public class LFUCache<K, V> implements SolrCache<K, V> {
   private ConcurrentLFUCache<K, V> cache;
   private int showItems = 0;
   private Boolean timeDecay = true;
+  private MetricsMap cacheMap;
+  private Set<String> metricNames = new HashSet<>();
+  private MetricRegistry registry;
 
   @Override
   public Object init(Map args, Object persistence, CacheRegenerator regenerator) {
@@ -212,11 +215,6 @@ public class LFUCache<K, V> implements SolrCache<K, V> {
   }
 
   @Override
-  public String getVersion() {
-    return SolrCore.version;
-  }
-
-  @Override
   public String getDescription() {
     return description;
   }
@@ -226,16 +224,6 @@ public class LFUCache<K, V> implements SolrCache<K, V> {
     return Category.CACHE;
   }
 
-  @Override
-  public String getSource() {
-    return null;
-  }
-
-  @Override
-  public URL[] getDocs() {
-    return null;
-  }
-
   // returns a ratio, not a percent.
   private static String calcHitRatio(long lookups, long hits) {
     if (lookups == 0) return "0.00";
@@ -246,62 +234,81 @@ public class LFUCache<K, V> implements SolrCache<K, V> {
   }
 
   @Override
-  public NamedList getStatistics() {
-    NamedList<Serializable> lst = new SimpleOrderedMap<>();
-    if (cache == null) return lst;
-    ConcurrentLFUCache.Stats stats = cache.getStats();
-    long lookups = stats.getCumulativeLookups();
-    long hits = stats.getCumulativeHits();
-    long inserts = stats.getCumulativePuts();
-    long evictions = stats.getCumulativeEvictions();
-    long size = stats.getCurrentSize();
-
-    lst.add("lookups", lookups);
-    lst.add("hits", hits);
-    lst.add("hitratio", calcHitRatio(lookups, hits));
-    lst.add("inserts", inserts);
-    lst.add("evictions", evictions);
-    lst.add("size", size);
-
-    lst.add("warmupTime", warmupTime);
-    lst.add("timeDecay", timeDecay);
-
-    long clookups = 0;
-    long chits = 0;
-    long cinserts = 0;
-    long cevictions = 0;
-
-    // NOTE: It is safe to iterate on a CopyOnWriteArrayList
-    for (ConcurrentLFUCache.Stats statistics : statsList) {
-      clookups += statistics.getCumulativeLookups();
-      chits += statistics.getCumulativeHits();
-      cinserts += statistics.getCumulativePuts();
-      cevictions += statistics.getCumulativeEvictions();
-    }
-    lst.add("cumulative_lookups", clookups);
-    lst.add("cumulative_hits", chits);
-    lst.add("cumulative_hitratio", calcHitRatio(clookups, chits));
-    lst.add("cumulative_inserts", cinserts);
-    lst.add("cumulative_evictions", cevictions);
-
-    if (showItems != 0) {
-      Map items = cache.getMostUsedItems(showItems == -1 ? Integer.MAX_VALUE : showItems);
-      for (Map.Entry e : (Set<Map.Entry>) items.entrySet()) {
-        Object k = e.getKey();
-        Object v = e.getValue();
-
-        String ks = "item_" + k;
-        String vs = v.toString();
-        lst.add(ks, vs);
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    registry = manager.registry(registryName);
+    cacheMap = new MetricsMap((detailed, map) -> {
+      if (cache != null) {
+        ConcurrentLFUCache.Stats stats = cache.getStats();
+        long lookups = stats.getCumulativeLookups();
+        long hits = stats.getCumulativeHits();
+        long inserts = stats.getCumulativePuts();
+        long evictions = stats.getCumulativeEvictions();
+        long size = stats.getCurrentSize();
+
+        map.put("lookups", lookups);
+        map.put("hits", hits);
+        map.put("hitratio", calcHitRatio(lookups, hits));
+        map.put("inserts", inserts);
+        map.put("evictions", evictions);
+        map.put("size", size);
+
+        map.put("warmupTime", warmupTime);
+        map.put("timeDecay", timeDecay);
+
+        long clookups = 0;
+        long chits = 0;
+        long cinserts = 0;
+        long cevictions = 0;
+
+        // NOTE: It is safe to iterate on a CopyOnWriteArrayList
+        for (ConcurrentLFUCache.Stats statistics : statsList) {
+          clookups += statistics.getCumulativeLookups();
+          chits += statistics.getCumulativeHits();
+          cinserts += statistics.getCumulativePuts();
+          cevictions += statistics.getCumulativeEvictions();
+        }
+        map.put("cumulative_lookups", clookups);
+        map.put("cumulative_hits", chits);
+        map.put("cumulative_hitratio", calcHitRatio(clookups, chits));
+        map.put("cumulative_inserts", cinserts);
+        map.put("cumulative_evictions", cevictions);
+
+        if (detailed && showItems != 0) {
+          Map items = cache.getMostUsedItems(showItems == -1 ? Integer.MAX_VALUE : showItems);
+          for (Map.Entry e : (Set<Map.Entry>) items.entrySet()) {
+            Object k = e.getKey();
+            Object v = e.getValue();
+
+            String ks = "item_" + k;
+            String vs = v.toString();
+            map.put(ks, vs);
+          }
+
+        }
+
       }
+    });
+    manager.registerGauge(this, registryName, cacheMap, true, scope, getCategory().toString());
+  }
 
-    }
+  // for unit tests only
+  MetricsMap getMetricsMap() {
+    return cacheMap;
+  }
 
-    return lst;
+  @Override
+  public Set<String> getMetricNames() {
+    return metricNames;
+  }
+
+  @Override
+  public MetricRegistry getMetricRegistry() {
+    return registry;
   }
 
   @Override
   public String toString() {
-    return name + getStatistics().toString();
+    return name + (cacheMap != null ? cacheMap.getValue().toString() : "");
   }
+
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/LRUCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/LRUCache.java b/solr/core/src/java/org/apache/solr/search/LRUCache.java
index b178fb2..ce206fe 100644
--- a/solr/core/src/java/org/apache/solr/search/LRUCache.java
+++ b/solr/core/src/java/org/apache/solr/search/LRUCache.java
@@ -19,18 +19,21 @@ package org.apache.solr.search;
 import java.lang.invoke.MethodHandles;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.LongAdder;
 
+import com.codahale.metrics.MetricRegistry;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.Accountables;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -55,6 +58,7 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
   static final long LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY =
       HASHTABLE_RAM_BYTES_PER_ENTRY
           + 2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF; // previous & next references
+
   /// End copied code
 
   /* An instance of this class will be shared across multiple instances
@@ -82,6 +86,9 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
 
   private Map<K,V> map;
   private String description="LRU Cache";
+  private MetricsMap cacheMap;
+  private Set<String> metricNames = new HashSet<>();
+  private MetricRegistry registry;
 
   private long maxRamBytes = Long.MAX_VALUE;
   // The synchronization used for the map will be used to update this,
@@ -319,45 +326,56 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
   }
 
   @Override
-  public String getSource() {
-    return null;
+  public Set<String> getMetricNames() {
+    return metricNames;
   }
 
   @Override
-  public NamedList getStatistics() {
-    NamedList lst = new SimpleOrderedMap();
-    synchronized (map) {
-      lst.add("lookups", lookups);
-      lst.add("hits", hits);
-      lst.add("hitratio", calcHitRatio(lookups,hits));
-      lst.add("inserts", inserts);
-      lst.add("evictions", evictions);
-      lst.add("size", map.size());
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    registry = manager.registry(registryName);
+    cacheMap = new MetricsMap((detailed, res) -> {
+      synchronized (map) {
+        res.put("lookups", lookups);
+        res.put("hits", hits);
+        res.put("hitratio", calcHitRatio(lookups,hits));
+        res.put("inserts", inserts);
+        res.put("evictions", evictions);
+        res.put("size", map.size());
+        if (maxRamBytes != Long.MAX_VALUE)  {
+          res.put("maxRamMB", maxRamBytes / 1024L / 1024L);
+          res.put("ramBytesUsed", ramBytesUsed());
+          res.put("evictionsRamUsage", evictionsRamUsage);
+        }
+      }
+      res.put("warmupTime", warmupTime);
+
+      long clookups = stats.lookups.longValue();
+      long chits = stats.hits.longValue();
+      res.put("cumulative_lookups", clookups);
+      res.put("cumulative_hits", chits);
+      res.put("cumulative_hitratio", calcHitRatio(clookups, chits));
+      res.put("cumulative_inserts", stats.inserts.longValue());
+      res.put("cumulative_evictions", stats.evictions.longValue());
       if (maxRamBytes != Long.MAX_VALUE)  {
-        lst.add("maxRamMB", maxRamBytes / 1024L / 1024L);
-        lst.add("ramBytesUsed", ramBytesUsed());
-        lst.add("evictionsRamUsage", evictionsRamUsage);
+        res.put("cumulative_evictionsRamUsage", stats.evictionsRamUsage.longValue());
       }
-    }
-    lst.add("warmupTime", warmupTime);
-    
-    long clookups = stats.lookups.longValue();
-    long chits = stats.hits.longValue();
-    lst.add("cumulative_lookups", clookups);
-    lst.add("cumulative_hits", chits);
-    lst.add("cumulative_hitratio", calcHitRatio(clookups, chits));
-    lst.add("cumulative_inserts", stats.inserts.longValue());
-    lst.add("cumulative_evictions", stats.evictions.longValue());
-    if (maxRamBytes != Long.MAX_VALUE)  {
-      lst.add("cumulative_evictionsRamUsage", stats.evictionsRamUsage.longValue());
-    }
-    
-    return lst;
+    });
+    manager.registerGauge(this, registryName, cacheMap, true, scope, getCategory().toString());
+  }
+
+  // for unit tests only
+  MetricsMap getMetricsMap() {
+    return cacheMap;
+  }
+
+  @Override
+  public MetricRegistry getMetricRegistry() {
+    return registry;
   }
 
   @Override
   public String toString() {
-    return name() + getStatistics().toString();
+    return name() + (cacheMap != null ? cacheMap.getValue().toString() : "");
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/QParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/QParserPlugin.java b/solr/core/src/java/org/apache/solr/search/QParserPlugin.java
index 34089d2..872c618 100644
--- a/solr/core/src/java/org/apache/solr/search/QParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/QParserPlugin.java
@@ -16,14 +16,14 @@
  */
 package org.apache.solr.search;
 
-import java.net.URL;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.search.join.BlockJoinChildQParserPlugin;
 import org.apache.solr.search.join.BlockJoinParentQParserPlugin;
@@ -31,7 +31,7 @@ import org.apache.solr.search.join.GraphQParserPlugin;
 import org.apache.solr.search.mlt.MLTQParserPlugin;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 
-public abstract class QParserPlugin implements NamedListInitializedPlugin, SolrInfoMBean {
+public abstract class QParserPlugin implements NamedListInitializedPlugin, SolrInfoBean {
   /** internal use - name of the default parser */
   public static final String DEFAULT_QTYPE = LuceneQParserPlugin.NAME;
 
@@ -99,11 +99,6 @@ public abstract class QParserPlugin implements NamedListInitializedPlugin, SolrI
   }
 
   @Override
-  public String getVersion() {
-    return null;
-  }
-
-  @Override
   public String getDescription() {
     return "";  // UI required non-null to work
   }
@@ -114,19 +109,10 @@ public abstract class QParserPlugin implements NamedListInitializedPlugin, SolrI
   }
 
   @Override
-  public String getSource() {
+  public Set<String> getMetricNames() {
     return null;
   }
 
-  @Override
-  public URL[] getDocs() {
-    return new URL[0];
-  }
-
-  @Override
-  public NamedList getStatistics() {
-    return null;
-  }
 }
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/SolrCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrCache.java b/solr/core/src/java/org/apache/solr/search/SolrCache.java
index 9a2d0fc..caa5c2c 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrCache.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrCache.java
@@ -16,7 +16,8 @@
  */
 package org.apache.solr.search;
 
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
+import org.apache.solr.metrics.SolrMetricProducer;
 
 import java.util.Map;
 
@@ -24,7 +25,7 @@ import java.util.Map;
 /**
  * Primary API for dealing with Solr's internal caches.
  */
-public interface SolrCache<K,V> extends SolrInfoMBean {
+public interface SolrCache<K,V> extends SolrInfoBean, SolrMetricProducer {
 
   /**
    * The initialization routine. Instance specific arguments are passed in

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/SolrCacheBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrCacheBase.java b/solr/core/src/java/org/apache/solr/search/SolrCacheBase.java
index 85caa90..c388d54 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrCacheBase.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrCacheBase.java
@@ -18,11 +18,10 @@ package org.apache.solr.search;
 
 import java.math.BigDecimal;
 import java.math.RoundingMode;
-import java.net.URL;
 import java.util.Map;
 
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean.Category;
+import org.apache.solr.core.SolrInfoBean.Category;
 import org.apache.solr.search.SolrCache.State;
 
 import static org.apache.solr.common.params.CommonParams.NAME;
@@ -106,10 +105,6 @@ public abstract class SolrCacheBase {
     return Category.CACHE;
   }
 
-  public URL[] getDocs() {
-    return null;
-  }
-  
   public void init(Map<String, String> args, CacheRegenerator regenerator) {
     this.regenerator = regenerator;
     state = State.CREATED;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java b/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java
new file mode 100644
index 0000000..ffcc37d
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import com.codahale.metrics.MetricRegistry;
+import org.apache.solr.core.SolrInfoBean;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.uninverting.UninvertingReader;
+
+/**
+ * A SolrInfoBean that provides introspection of the Solr FieldCache
+ *
+ */
+public class SolrFieldCacheBean implements SolrInfoBean, SolrMetricProducer {
+
+  private boolean disableEntryList = Boolean.getBoolean("disableSolrFieldCacheMBeanEntryList");
+  private boolean disableJmxEntryList = Boolean.getBoolean("disableSolrFieldCacheMBeanEntryListJmx");
+
+  private MetricRegistry registry;
+  private Set<String> metricNames = new HashSet<>();
+
+  @Override
+  public String getName() { return this.getClass().getName(); }
+  @Override
+  public String getDescription() {
+    return "Provides introspection of the Solr FieldCache ";
+  }
+  @Override
+  public Category getCategory() { return Category.CACHE; }
+  @Override
+  public Set<String> getMetricNames() {
+    return metricNames;
+  }
+  @Override
+  public MetricRegistry getMetricRegistry() {
+    return registry;
+  }
+
+  @Override
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    registry = manager.registry(registryName);
+    MetricsMap metricsMap = new MetricsMap((detailed, map) -> {
+      if (detailed && !disableEntryList && !disableJmxEntryList) {
+        UninvertingReader.FieldCacheStats fieldCacheStats = UninvertingReader.getUninvertedStats();
+        String[] entries = fieldCacheStats.info;
+        map.put("entries_count", entries.length);
+        map.put("total_size", fieldCacheStats.totalSize);
+        for (int i = 0; i < entries.length; i++) {
+          final String entry = entries[i];
+          map.put("entry#" + i, entry);
+        }
+      } else {
+        map.put("entries_count", UninvertingReader.getUninvertedStatsSize());
+      }
+    });
+    manager.register(this, registryName, metricsMap, true, "fieldCache", Category.CACHE.toString(), scope);
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/SolrFieldCacheMBean.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrFieldCacheMBean.java b/solr/core/src/java/org/apache/solr/search/SolrFieldCacheMBean.java
deleted file mode 100644
index 642b708..0000000
--- a/solr/core/src/java/org/apache/solr/search/SolrFieldCacheMBean.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.search;
-
-import java.net.URL;
-
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.JmxMonitoredMap.JmxAugmentedSolrInfoMBean;
-import org.apache.solr.core.SolrCore;
-import org.apache.solr.uninverting.UninvertingReader;
-
-/**
- * A SolrInfoMBean that provides introspection of the Solr FieldCache
- *
- */
-public class SolrFieldCacheMBean implements JmxAugmentedSolrInfoMBean {
-
-  private boolean disableEntryList = Boolean.getBoolean("disableSolrFieldCacheMBeanEntryList");
-  private boolean disableJmxEntryList = Boolean.getBoolean("disableSolrFieldCacheMBeanEntryListJmx");
-
-  @Override
-  public String getName() { return this.getClass().getName(); }
-  @Override
-  public String getVersion() { return SolrCore.version; }
-  @Override
-  public String getDescription() {
-    return "Provides introspection of the Solr FieldCache ";
-  }
-  @Override
-  public Category getCategory() { return Category.CACHE; } 
-  @Override
-  public String getSource() { return null; }
-  @Override
-  public URL[] getDocs() {
-    return null;
-  }
-  @Override
-  public NamedList getStatistics() {
-    return getStats(!disableEntryList);
-  }
-
-  @Override
-  public NamedList getStatisticsForJmx() {
-    return getStats(!disableEntryList && !disableJmxEntryList);
-  }
-
-  private NamedList getStats(boolean listEntries) {
-    NamedList stats = new SimpleOrderedMap();
-    if (listEntries) {
-      UninvertingReader.FieldCacheStats fieldCacheStats = UninvertingReader.getUninvertedStats();
-      String[] entries = fieldCacheStats.info;
-      stats.add("entries_count", entries.length);
-      stats.add("total_size", fieldCacheStats.totalSize);
-      for (int i = 0; i < entries.length; i++) {
-        stats.add("entry#" + i, entries[i]);
-      }
-    } else {
-      stats.add("entries_count", UninvertingReader.getUninvertedStatsSize());
-    }
-    return stats;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 4207a9b..9d63aad 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -19,13 +19,13 @@ package org.apache.solr.search;
 import java.io.Closeable;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -34,6 +34,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 
+import com.codahale.metrics.MetricRegistry;
 import com.google.common.collect.Iterables;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.DirectoryReader;
@@ -58,15 +59,15 @@ import org.apache.lucene.util.FixedBitSet;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.ObjectReleaseTracker;
-import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.core.DirectoryFactory;
 import org.apache.solr.core.DirectoryFactory.DirContext;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.index.SlowCompositeReaderWrapper;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestInfo;
@@ -86,7 +87,7 @@ import org.slf4j.LoggerFactory;
  *
  * @since solr 0.9
  */
-public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrInfoMBean {
+public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrInfoBean, SolrMetricProducer {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
@@ -136,7 +137,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
   private final String path;
   private boolean releaseDirectory;
 
-  private final NamedList<Object> readerStats;
+  private Set<String> metricNames = new HashSet<>();
 
   private static DirectoryReader getReader(SolrCore core, SolrIndexConfig config, DirectoryFactory directoryFactory,
                                            String path) throws IOException {
@@ -154,11 +155,26 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
   private static DirectoryReader wrapReader(SolrCore core, DirectoryReader reader) throws IOException {
     assert reader != null;
     return ExitableDirectoryReader.wrap(
-        UninvertingReader.wrap(reader, core.getLatestSchema().getUninversionMap(reader)),
+        wrapUninvertingReaderPerSegment(core, reader),
         SolrQueryTimeoutImpl.getInstance());
   }
 
   /**
+   * If docvalues are enabled or disabled after data has already been indexed for a field, such that
+   * only some segments have docvalues, uninverting on the top level reader will cause 
+   * IllegalStateException to be thrown when trying to use a field with such mixed data. This is because
+   * the {@link IndexSchema#getUninversionMap(IndexReader)} method decides to put a field 
+   * into the uninverting map only if *NO* segment in the index contains docvalues for that field.
+   * 
+   * Therefore, this method provides an uninverting map per segment such that for any field, 
+   * DocValues are used from segments if they exist and uninversion of the field is performed on the rest
+   * of the segments.
+   */
+   private static DirectoryReader wrapUninvertingReaderPerSegment(SolrCore core, DirectoryReader reader) throws IOException {
+     return UninvertingReader.wrap(reader, r -> core.getLatestSchema().getUninversionMap(r));
+   }
+
+  /**
    * Builds the necessary collector chain (via delegate wrapping) and executes the query against it. This method takes
    * into consideration both the explicitly provided collector and postFilter as well as any needed collector wrappers
    * for dealing with options specified in the QueryCommand.
@@ -302,7 +318,6 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
     // We already have our own filter cache
     setQueryCache(null);
 
-    readerStats = snapStatistics(reader);
     // do this at the end since an exception in the constructor means we won't close
     numOpens.incrementAndGet();
     assert ObjectReleaseTracker.track(this);
@@ -404,10 +419,10 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
   }
 
   /**
-   * Register sub-objects such as caches
+   * Register sub-objects such as caches and our own metrics
    */
   public void register() {
-    final Map<String,SolrInfoMBean> infoRegistry = core.getInfoRegistry();
+    final Map<String,SolrInfoBean> infoRegistry = core.getInfoRegistry();
     // register self
     infoRegistry.put(STATISTICS_KEY, this);
     infoRegistry.put(name, this);
@@ -415,6 +430,12 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
       cache.setState(SolrCache.State.LIVE);
       infoRegistry.put(cache.name(), cache);
     }
+    SolrMetricManager manager = core.getCoreContainer().getMetricManager();
+    String registry = core.getCoreMetricManager().getRegistryName();
+    for (SolrCache cache : cacheList) {
+      cache.initializeMetrics(manager, registry, SolrMetricManager.mkName(cache.name(), STATISTICS_KEY));
+    }
+    initializeMetrics(manager, registry, STATISTICS_KEY);
     registerTime = new Date();
   }
 
@@ -2190,7 +2211,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
 
 
   /////////////////////////////////////////////////////////////////////
-  // SolrInfoMBean stuff: Statistics and Module Info
+  // SolrInfoBean stuff: Statistics and Module Info
   /////////////////////////////////////////////////////////////////////
 
   @Override
@@ -2199,11 +2220,6 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
   }
 
   @Override
-  public String getVersion() {
-    return SolrCore.version;
-  }
-
-  @Override
   public String getDescription() {
     return "index searcher";
   }
@@ -2214,38 +2230,31 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
   }
 
   @Override
-  public String getSource() {
-    return null;
+  public Set<String> getMetricNames() {
+    return metricNames;
   }
 
   @Override
-  public URL[] getDocs() {
-    return null;
+  public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
+
+    manager.registerGauge(this, registry, () -> name, true, "searcherName", Category.SEARCHER.toString(), scope);
+    manager.registerGauge(this, registry, () -> cachingEnabled, true, "caching", Category.SEARCHER.toString(), scope);
+    manager.registerGauge(this, registry, () -> openTime, true, "openedAt", Category.SEARCHER.toString(), scope);
+    manager.registerGauge(this, registry, () -> warmupTime, true, "warmupTime", Category.SEARCHER.toString(), scope);
+    manager.registerGauge(this, registry, () -> registerTime, true, "registeredAt", Category.SEARCHER.toString(), scope);
+    // reader stats
+    manager.registerGauge(this, registry, () -> reader.numDocs(), true, "numDocs", Category.SEARCHER.toString(), scope);
+    manager.registerGauge(this, registry, () -> reader.maxDoc(), true, "maxDoc", Category.SEARCHER.toString(), scope);
+    manager.registerGauge(this, registry, () -> reader.maxDoc() - reader.numDocs(), true, "deletedDocs", Category.SEARCHER.toString(), scope);
+    manager.registerGauge(this, registry, () -> reader.toString(), true, "reader", Category.SEARCHER.toString(), scope);
+    manager.registerGauge(this, registry, () -> reader.directory().toString(), true, "readerDir", Category.SEARCHER.toString(), scope);
+    manager.registerGauge(this, registry, () -> reader.getVersion(), true, "indexVersion", Category.SEARCHER.toString(), scope);
+
   }
 
   @Override
-  public NamedList<Object> getStatistics() {
-    final NamedList<Object> lst = new SimpleOrderedMap<>();
-    lst.add("searcherName", name);
-    lst.add("caching", cachingEnabled);
-
-    lst.addAll(readerStats);
-
-    lst.add("openedAt", openTime);
-    if (registerTime != null) lst.add("registeredAt", registerTime);
-    lst.add("warmupTime", warmupTime);
-    return lst;
-  }
-
-  static private NamedList<Object> snapStatistics(DirectoryReader reader) {
-    final NamedList<Object> lst = new SimpleOrderedMap<>();
-    lst.add("numDocs", reader.numDocs());
-    lst.add("maxDoc", reader.maxDoc());
-    lst.add("deletedDocs", reader.maxDoc() - reader.numDocs());
-    lst.add("reader", reader.toString());
-    lst.add("readerDir", reader.directory());
-    lst.add("indexVersion", reader.getVersion());
-    return lst;
+  public MetricRegistry getMetricRegistry() {
+    return core.getMetricRegistry();
   }
 
   private static class FilterImpl extends Filter {

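The uninversion change is the behavioral core of this hunk: UninvertingReader.wrap now takes a
mapping function instead of a precomputed map, so the schema is consulted once per segment. A
minimal sketch (reader and schema assumed to be in scope):

    DirectoryReader wrapped =
        UninvertingReader.wrap(reader, leaf -> schema.getUninversionMap(leaf));

Evaluating the map per leaf reader means a field that has docValues in some segments but not others
uses docValues where they exist and is uninverted elsewhere, instead of the previous all-or-nothing
top-level decision that raised IllegalStateException on such mixed indexes.
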
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java b/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java
index c2b8a5d..b802c41 100644
--- a/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java
+++ b/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java
@@ -58,9 +58,11 @@ import org.apache.solr.search.facet.HLLAgg;
 import org.apache.solr.search.facet.MaxAgg;
 import org.apache.solr.search.facet.MinAgg;
 import org.apache.solr.search.facet.PercentileAgg;
+import org.apache.solr.search.facet.StddevAgg;
 import org.apache.solr.search.facet.SumAgg;
 import org.apache.solr.search.facet.SumsqAgg;
 import org.apache.solr.search.facet.UniqueAgg;
+import org.apache.solr.search.facet.VarianceAgg;
 import org.apache.solr.search.function.CollapseScoreFunction;
 import org.apache.solr.search.function.OrdFieldSource;
 import org.apache.solr.search.function.ReverseOrdFieldSource;
@@ -931,14 +933,21 @@ public abstract class ValueSourceParser implements NamedListInitializedPlugin {
       }
     });
 
-    /***
-     addParser("agg_stdev", new ValueSourceParser() {
-    @Override
-    public ValueSource parse(FunctionQParser fp) throws SyntaxError {
-    return null;
-    }
+    addParser("agg_variance", new ValueSourceParser() {
+      @Override
+      public ValueSource parse(FunctionQParser fp) throws SyntaxError {
+        return new VarianceAgg(fp.parseValueSource());
+      }
     });
-
+    
+    addParser("agg_stddev", new ValueSourceParser() {
+      @Override
+      public ValueSource parse(FunctionQParser fp) throws SyntaxError {
+        return new StddevAgg(fp.parseValueSource());
+      }
+    });
+    
+    /***
      addParser("agg_multistat", new ValueSourceParser() {
     @Override
     public ValueSource parse(FunctionQParser fp) throws SyntaxError {

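The two parsers above expose the new variance and stddev aggregates under the same agg_ naming
convention as the other facet functions, so they should be addressable from JSON Facet requests. A
hedged SolrJ sketch (field name and facet labels are hypothetical, and the variance(...)/stddev(...)
function names are assumed from the agg_ registration above):

    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("q", "*:*");
    params.set("json.facet",
        "{ priceVar : 'variance(price)', priceStdev : 'stddev(price)' }");

Since stddev is just the square root of the variance, both can be derived from the same per-bucket
count, sum, and sum-of-squares accumulators.
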
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
index f8f6463..9ec5d79 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
@@ -177,9 +177,11 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
       if (freq.missing) {
         refinement = getRefinementSpecial(mcontext, refinement, tagsWithPartial, missingBucket, "missing");
       }
+      /** allBuckets does not execute sub-facets because we don't change the domain.  We may need refinement info in the future though for stats.
       if (freq.allBuckets) {
         refinement = getRefinementSpecial(mcontext, refinement, tagsWithPartial, allBuckets, "allBuckets");
       }
+       **/
     }
     return refinement;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
index 95b9f0b..c19d55d 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
@@ -24,6 +24,8 @@ import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.schema.SchemaField;
 
+import static org.apache.solr.search.facet.FacetContext.SKIP_FACET;
+
 /**
  * Base class for DV/UIF accumulating counts into an array by ordinal.  It's
  * for {@link org.apache.lucene.index.SortedDocValues} and {@link org.apache.lucene.index.SortedSetDocValues} only.
@@ -57,8 +59,14 @@ abstract class FacetFieldProcessorByArray extends FacetFieldProcessor {
   }
 
   private SimpleOrderedMap<Object> calcFacets() throws IOException {
+    SimpleOrderedMap<Object> refineResult = null;
+    boolean skipThisFacet = (fcontext.flags & SKIP_FACET) != 0;
+
     if (fcontext.facetInfo != null) {
-      return refineFacets();
+      refineResult = refineFacets();
+      // If this facet bucket has already been seen, refinement alone is enough.  Otherwise
+      // we only need to continue when allBuckets or numBuckets info was requested.
+      if (skipThisFacet || (!freq.allBuckets && !freq.numBuckets)) return refineResult;
     }
 
     String prefix = freq.prefix;
@@ -71,6 +79,20 @@ abstract class FacetFieldProcessorByArray extends FacetFieldProcessor {
 
     findStartAndEndOrds();
 
+    if (refineResult != null) {
+      if (freq.allBuckets) {
+        createAccs(nDocs, 1);
+        allBucketsAcc = new SpecialSlotAcc(fcontext, null, -1, accs, 0);
+        collectDocs();
+
+        SimpleOrderedMap<Object> allBuckets = new SimpleOrderedMap<>();
+        allBuckets.add("count", allBucketsAcc.getSpecialCount());
+        allBucketsAcc.setValues(allBuckets, -1); // -1 slotNum is unused for SpecialSlotAcc
+        refineResult.add("allBuckets", allBuckets);
+        return refineResult;
+      }
+    }
+
     maxSlots = nTerms;
 
     if (freq.allBuckets) {

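The new early-computation branch above means allBuckets can be reported even when the bucket list
itself comes back via refinement. A hedged SolrJ sketch that exercises this path; the collection
and field names are illustrative:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;

    public class AllBucketsRefineExample {
      public static void main(String[] args) throws Exception {
        try (HttpSolrClient client =
                 new HttpSolrClient.Builder("http://localhost:8983/solr/techproducts").build()) {
          SolrQuery q = new SolrQuery("*:*");
          q.setRows(0);
          // refine:true plus allBuckets:true hits the new path in calcFacets():
          // refineFacets() runs first, then a SpecialSlotAcc collects the allBuckets count.
          q.add("json.facet",
              "{ cats : { type : terms, field : cat, refine : true, allBuckets : true } }");
          System.out.println(client.query(q).getResponse().get("facets"));
        }
      }
    }
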
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java b/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
index bf13791..3407ae4 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
@@ -319,12 +319,6 @@ public class FacetModule extends SearchComponent {
   public Category getCategory() {
     return Category.QUERY;
   }
-
-  @Override
-  public String getSource() {
-    return null;
-  }
-
 }
 
 


[17/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/SolrCores.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCores.java b/solr/core/src/java/org/apache/solr/core/SolrCores.java
index 40d5115..ef6fca5 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCores.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCores.java
@@ -47,7 +47,8 @@ class SolrCores implements Observer {
   private static Object modifyLock = new Object(); // for locking around manipulating any of the core maps.
   private final Map<String, SolrCore> cores = new LinkedHashMap<>(); // For "permanent" cores
 
-  private final Map<String, CoreDescriptor> lazyDescriptors = new LinkedHashMap<>();
+  // These descriptors, once loaded, will _not_ be unloaded, i.e. they are not "transient".
+  private final Map<String, CoreDescriptor> residentDescriptors = new LinkedHashMap<>();
 
   private final CoreContainer container;
   
@@ -67,17 +68,26 @@ class SolrCores implements Observer {
     this.container = container;
   }
   
-  protected void putDynamicDescriptor(String rawName, CoreDescriptor cd) {
+  protected void addCoreDescriptor(CoreDescriptor p) {
     synchronized (modifyLock) {
-      if (cd.isTransient()) {
+      if (p.isTransient()) {
+        if (container.getTransientCacheHandler() != null) {
+          container.getTransientCacheHandler().addTransientDescriptor(p.getName(), p);
+        }
+      } else {
+        residentDescriptors.put(p.getName(), p);
+      }
+    }
+  }
+
+  protected void removeCoreDescriptor(CoreDescriptor p) {
+    synchronized (modifyLock) {
+      if (p.isTransient()) {
         if (container.getTransientCacheHandler() != null) {
-          container.getTransientCacheHandler().addTransientDescriptor(rawName, cd);
-        } else {
-          log.error("Tried to add transient core to transient handler, but no transient core handler has been found. "
-              + " Descriptor: " + cd.toString());
+          container.getTransientCacheHandler().removeTransientDescriptor(p.getName());
         }
       } else {
-        lazyDescriptors.put(rawName, cd);
+        residentDescriptors.remove(p.getName());
       }
     }
   }
@@ -149,10 +159,18 @@ class SolrCores implements Observer {
     return retCore;
   }
 
-  protected SolrCore putCore(String name, SolrCore core) {
+  // Returns the old core if there was a core of the same name.
+  protected SolrCore putCore(CoreDescriptor cd, SolrCore core) {
     synchronized (modifyLock) {
-      return cores.put(name, core);
+      if (cd.isTransient()) {
+        if (container.getTransientCacheHandler() != null) {
+          return container.getTransientCacheHandler().addCore(cd.getName(), core);
+        }
+      } else {
+        return cores.put(cd.getName(), core);
+      }
     }
+    return null;
   }
 
   /**
@@ -231,7 +249,7 @@ class SolrCores implements Observer {
       if (container.getTransientCacheHandler() != null) {
         set.addAll(container.getTransientCacheHandler().getAllCoreNames());
       }
-      set.addAll(lazyDescriptors.keySet());
+      set.addAll(residentDescriptors.keySet());
     }
     return set;
   }
@@ -260,13 +278,19 @@ class SolrCores implements Observer {
           throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No such core: " + n1);
         }
       }
+      // When we swap the cores, we also need to swap the associated core descriptors. Note that
+      // this changes the name of the CoreDescriptor by virtue of the copy constructor.
+      CoreDescriptor cd1 = c1.getCoreDescriptor(); 
+      addCoreDescriptor(new CoreDescriptor(n1, c0.getCoreDescriptor()));
+      addCoreDescriptor(new CoreDescriptor(n0, cd1));
       cores.put(n0, c1);
       cores.put(n1, c0);
+      c0.setName(n1);
+      c1.setName(n0);
+      
       container.getMetricManager().swapRegistries(
           c0.getCoreMetricManager().getRegistryName(),
           c1.getCoreMetricManager().getRegistryName());
-      c0.setName(n1);
-      c1.setName(n0);
     }
 
   }
@@ -277,12 +301,10 @@ class SolrCores implements Observer {
       SolrCore ret = cores.remove(name);
       // It could have been a newly-created core. It could have been a transient core. The newly-created cores
       // in particular should be checked. It could have been a dynamic core.
-      TransientSolrCoreCache transientHandler = container.getTransientCacheHandler(); 
+      TransientSolrCoreCache transientHandler = container.getTransientCacheHandler();
       if (ret == null && transientHandler != null) {
         ret = transientHandler.removeCore(name);
-        transientHandler.removeTransientDescriptor(name);
       }
-      lazyDescriptors.remove(name);
       return ret;
     }
   }
@@ -304,14 +326,6 @@ class SolrCores implements Observer {
     }
   }
 
-  protected CoreDescriptor getDynamicDescriptor(String name) {
-    synchronized (modifyLock) {
-      CoreDescriptor cd = lazyDescriptors.get(name);
-      if (cd != null || container.getTransientCacheHandler() == null) return cd;
-      return container.getTransientCacheHandler().getTransientDescriptor(name);
-    }
-  }
-
   // See SOLR-5366 for why the UNLOAD command needs to know whether a core is actually loaded or not, it might have
   // to close the core. However, there's a race condition. If the core happens to be in the pending "to close" queue,
   // we should NOT close it in unload core.
@@ -350,7 +364,7 @@ class SolrCores implements Observer {
 
   protected CoreDescriptor getUnloadedCoreDescriptor(String cname) {
     synchronized (modifyLock) {
-      CoreDescriptor desc = lazyDescriptors.get(cname);
+      CoreDescriptor desc = residentDescriptors.get(cname);
       if (desc == null) {
         if (container.getTransientCacheHandler() == null) return null;
         desc = container.getTransientCacheHandler().getTransientDescriptor(cname);
@@ -439,10 +453,8 @@ class SolrCores implements Observer {
    */
   public CoreDescriptor getCoreDescriptor(String coreName) {
     synchronized (modifyLock) {
-      if (cores.containsKey(coreName))
-        return cores.get(coreName).getCoreDescriptor();
-      if (lazyDescriptors.containsKey(coreName) || container.getTransientCacheHandler() == null)
-        return lazyDescriptors.get(coreName);
+      if (residentDescriptors.containsKey(coreName) || container.getTransientCacheHandler() == null)
+        return residentDescriptors.get(coreName);
       return container.getTransientCacheHandler().getTransientDescriptor(coreName);
     }
   }

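Swapping still goes through CoreContainer, which delegates to the updated SolrCores.swap above;
names, descriptors and metric registries are now all exchanged together. A minimal sketch, with
hypothetical core names:

    import org.apache.solr.core.CoreContainer;

    public class SwapSketch {
      // After this call each core carries the other's name and descriptor,
      // matching the descriptor handling added to SolrCores.swap.
      static void swap(CoreContainer cc) {
        cc.swap("coreA", "coreB"); // hypothetical core names
      }
    }
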
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/SolrInfoBean.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrInfoBean.java b/solr/core/src/java/org/apache/solr/core/SolrInfoBean.java
new file mode 100644
index 0000000..472b15e
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/core/SolrInfoBean.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.core;
+
+import java.util.Map;
+import java.util.Set;
+
+import com.codahale.metrics.MetricRegistry;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.util.stats.MetricUtils;
+
+/**
+ * Interface for getting various UI-friendly strings
+ * for use by objects which are 'pluggable', to make server administration
+ * easier.
+ */
+public interface SolrInfoBean {
+
+  /**
+   * Category of Solr component.
+   */
+  enum Category { CONTAINER, ADMIN, CORE, QUERY, UPDATE, CACHE, HIGHLIGHTER, QUERYPARSER, SPELLCHECKER,
+    SEARCHER, REPLICATION, TLOG, INDEX, DIRECTORY, HTTP, OTHER }
+
+  /**
+   * Top-level group of beans or metrics for a subsystem.
+   */
+  enum Group { jvm, jetty, node, core, collection, shard, cluster, overseer }
+
+  /**
+   * Simple common usage name, e.g. BasicQueryHandler,
+   * or fully qualified class name.
+   */
+  String getName();
+  /** Simple one or two line description */
+  String getDescription();
+  /** Category of this component */
+  Category getCategory();
+
+  /** Optionally return a snapshot of metrics that this component reports, or null.
+   * Default implementation requires that both {@link #getMetricNames()} and
+   * {@link #getMetricRegistry()} return non-null values.
+   */
+  default Map<String, Object> getMetricsSnapshot() {
+    if (getMetricRegistry() == null || getMetricNames() == null) {
+      return null;
+    }
+    return MetricUtils.convertMetrics(getMetricRegistry(), getMetricNames());
+  }
+
+  /**
+   * Modifiable set of metric names that this component reports (default is null,
+   * which means none). If not null then this set is used by {@link #registerMetricName(String)}
+   * to capture what metrics names are reported from this component.
+   */
+  default Set<String> getMetricNames() {
+    return null;
+  }
+
+  /**
+   * An instance of {@link MetricRegistry} that this component uses for metrics reporting
+   * (default is null, which means no registry).
+   */
+  default MetricRegistry getMetricRegistry() {
+    return null;
+  }
+
+  /** Register a metric name that this component reports. This method is called by various
+   * metric registration methods in {@link org.apache.solr.metrics.SolrMetricManager} in order
+   * to capture what metric names are reported from this component (which in turn is called
+   * from {@link org.apache.solr.metrics.SolrMetricProducer#initializeMetrics(SolrMetricManager, String, String)}).
+   * <p>Default implementation registers all metrics added by a component. Implementations may
+   * override this to avoid reporting some or all metrics returned by {@link #getMetricsSnapshot()}.</p>
+   */
+  default void registerMetricName(String name) {
+    Set<String> names = getMetricNames();
+    if (names != null) {
+      names.add(name);
+    }
+  }
+}

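A minimal sketch of a component implementing the new interface so that the default
getMetricsSnapshot() works, i.e. both getMetricNames() and getMetricRegistry() return non-null;
the class and metric names are illustrative:

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    import com.codahale.metrics.MetricRegistry;
    import org.apache.solr.core.SolrInfoBean;

    public class ExampleInfoBean implements SolrInfoBean {
      // registerMetricName(String) adds into this set via the interface's default method.
      private final Set<String> metricNames = ConcurrentHashMap.newKeySet();
      private final MetricRegistry registry = new MetricRegistry();

      @Override public String getName() { return "exampleInfoBean"; }
      @Override public String getDescription() { return "Illustrative SolrInfoBean"; }
      @Override public Category getCategory() { return Category.OTHER; }
      @Override public Set<String> getMetricNames() { return metricNames; }
      @Override public MetricRegistry getMetricRegistry() { return registry; }
    }
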
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java b/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java
deleted file mode 100644
index 63bdef0..0000000
--- a/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core;
-
-import java.net.URL;
-
-import org.apache.solr.common.util.NamedList;
-
-/**
- * MBean interface for getting various ui friendly strings and URLs
- * for use by objects which are 'pluggable' to make server administration
- * easier.
- *
- *
- */
-public interface SolrInfoMBean {
-
-  /**
-   * Category of Solr component.
-   */
-  enum Category { CONTAINER, ADMIN, CORE, QUERY, UPDATE, CACHE, HIGHLIGHTER, QUERYPARSER, SPELLCHECKER,
-    SEARCHER, REPLICATION, TLOG, INDEX, DIRECTORY, HTTP, OTHER }
-
-  /**
-   * Top-level group of beans or metrics for a subsystem.
-   */
-  enum Group { jvm, jetty, node, core, collection, shard, cluster, overseer }
-
-  /**
-   * Simple common usage name, e.g. BasicQueryHandler,
-   * or fully qualified clas name.
-   */
-  public String getName();
-  /** Simple common usage version, e.g. 2.0 */
-  public String getVersion();
-  /** Simple one or two line description */
-  public String getDescription();
-  /** Purpose of this Class */
-  public Category getCategory();
-  /** CVS Source, SVN Source, etc */
-  public String getSource();
-  /**
-   * Documentation URL list.
-   *
-   * <p>
-   * Suggested documentation URLs: Homepage for sponsoring project,
-   * FAQ on class usage, Design doc for class, Wiki, bug reporting URL, etc...
-   * </p>
-   */
-  public URL[] getDocs();
-  /**
-   * Any statistics this instance would like to be publicly available via
-   * the Solr Administration interface.
-   *
-   * <p>
-   * Any Object type may be stored in the list, but only the
-   * <code>toString()</code> representation will be used.
-   * </p>
-   */
-  public NamedList getStatistics();
-
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/SolrInfoMBeanWrapper.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrInfoMBeanWrapper.java b/solr/core/src/java/org/apache/solr/core/SolrInfoMBeanWrapper.java
deleted file mode 100644
index 534b884..0000000
--- a/solr/core/src/java/org/apache/solr/core/SolrInfoMBeanWrapper.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.core;
-
-import java.net.URL;
-
-import org.apache.solr.common.util.NamedList;
-
-/**
- * Wraps a {@link SolrInfoMBean}.
- */
-public class SolrInfoMBeanWrapper implements SolrInfoMBean {
-  private final SolrInfoMBean mbean;
-
-  public SolrInfoMBeanWrapper(SolrInfoMBean mbean) {
-    this.mbean = mbean;
-  }
-
-  /** {@inheritDoc} */
-  @Override
-  public String getName() { return mbean.getName(); }
-
-  /** {@inheritDoc} */
-  @Override
-  public String getVersion() { return mbean.getVersion(); }
-
-  /** {@inheritDoc} */
-  @Override
-  public String getDescription() { return mbean.getDescription(); }
-
-  /** {@inheritDoc} */
-  @Override
-  public Category getCategory() { return mbean.getCategory(); }
-
-  /** {@inheritDoc} */
-  @Override
-  public String getSource() { return mbean.getSource(); }
-
-  /** {@inheritDoc} */
-  @Override
-  public URL[] getDocs() { return mbean.getDocs(); }
-
-  /** {@inheritDoc} */
-  @Override
-  public NamedList getStatistics() { return mbean.getStatistics(); }
-
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
index d08646a..17cdbbc 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
@@ -101,7 +101,7 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
   private String dataDir;
   
   private final List<SolrCoreAware> waitingForCore = Collections.synchronizedList(new ArrayList<SolrCoreAware>());
-  private final List<SolrInfoMBean> infoMBeans = Collections.synchronizedList(new ArrayList<SolrInfoMBean>());
+  private final List<SolrInfoBean> infoMBeans = Collections.synchronizedList(new ArrayList<SolrInfoBean>());
   private final List<ResourceLoaderAware> waitingForResources = Collections.synchronizedList(new ArrayList<ResourceLoaderAware>());
   private static final Charset UTF_8 = StandardCharsets.UTF_8;
 
@@ -665,9 +665,9 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
         assertAwareCompatibility( ResourceLoaderAware.class, obj );
         waitingForResources.add( (ResourceLoaderAware)obj );
       }
-      if (obj instanceof SolrInfoMBean){
+      if (obj instanceof SolrInfoBean){
         //TODO: Assert here?
-        infoMBeans.add((SolrInfoMBean) obj);
+        infoMBeans.add((SolrInfoBean) obj);
       }
     }
 
@@ -723,21 +723,21 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
   }
 
   /**
-   * Register any {@link org.apache.solr.core.SolrInfoMBean}s
+   * Register any {@link SolrInfoBean}s
    * @param infoRegistry The Info Registry
    */
-  public void inform(Map<String, SolrInfoMBean> infoRegistry) {
+  public void inform(Map<String, SolrInfoBean> infoRegistry) {
     // this can currently happen concurrently with requests starting and lazy components
     // loading.  Make sure infoMBeans doesn't change.
 
-    SolrInfoMBean[] arr;
+    SolrInfoBean[] arr;
     synchronized (infoMBeans) {
-      arr = infoMBeans.toArray(new SolrInfoMBean[infoMBeans.size()]);
+      arr = infoMBeans.toArray(new SolrInfoBean[infoMBeans.size()]);
       waitingForResources.clear();
     }
 
 
-    for (SolrInfoMBean bean : arr) {
+    for (SolrInfoBean bean : arr) {
       // Too slow? I suspect not, but we may need
       // to start tracking this in a Set.
       if (!infoRegistry.containsValue(bean)) {
@@ -880,7 +880,7 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
   public void close() throws IOException {
     IOUtils.close(classLoader);
   }
-  public List<SolrInfoMBean> getInfoMBeans(){
+  public List<SolrInfoBean> getInfoMBeans(){
     return Collections.unmodifiableList(infoMBeans);
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
index b37bd52..65f29a4 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.core;
 
+import javax.management.MBeanServer;
 import javax.xml.xpath.XPath;
 import javax.xml.xpath.XPathConstants;
 import javax.xml.xpath.XPathExpressionException;
@@ -25,7 +26,10 @@ import java.lang.invoke.MethodHandles;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
@@ -35,8 +39,10 @@ import org.apache.commons.io.IOUtils;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.logging.LogWatcherConfig;
+import org.apache.solr.metrics.reporters.SolrJmxReporter;
 import org.apache.solr.update.UpdateShardHandlerConfig;
 import org.apache.solr.util.DOMUtil;
+import org.apache.solr.util.JmxUtil;
 import org.apache.solr.util.PropertiesUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -98,6 +104,7 @@ public class SolrXmlConfig {
       configBuilder.setCloudConfig(cloudConfig);
     configBuilder.setBackupRepositoryPlugins(getBackupRepositoryPluginInfos(config));
     configBuilder.setMetricReporterPlugins(getMetricReporterPluginInfos(config));
+    configBuilder.setHiddenSysProps(getHiddenSysProps(config));
     return fillSolrSection(configBuilder, entries);
   }
 
@@ -284,6 +291,7 @@ public class SolrXmlConfig {
     int distributedSocketTimeout = UpdateShardHandlerConfig.DEFAULT_DISTRIBUPDATESOTIMEOUT;
     int distributedConnectionTimeout = UpdateShardHandlerConfig.DEFAULT_DISTRIBUPDATECONNTIMEOUT;
     String metricNameStrategy = UpdateShardHandlerConfig.DEFAULT_METRICNAMESTRATEGY;
+    int maxRecoveryThreads = UpdateShardHandlerConfig.DEFAULT_MAXRECOVERYTHREADS;
 
     Object muc = nl.remove("maxUpdateConnections");
     if (muc != null) {
@@ -315,10 +323,17 @@ public class SolrXmlConfig {
       defined = true;
     }
 
+    Object mrt = nl.remove("maxRecoveryThreads");
+    if (mrt != null)  {
+      maxRecoveryThreads = parseInt("maxRecoveryThreads", mrt.toString());
+      defined = true;
+    }
+
     if (!defined && !alwaysDefine)
       return null;
 
-    return new UpdateShardHandlerConfig(maxUpdateConnections, maxUpdateConnectionsPerHost, distributedSocketTimeout, distributedConnectionTimeout, metricNameStrategy);
+    return new UpdateShardHandlerConfig(maxUpdateConnections, maxUpdateConnectionsPerHost, distributedSocketTimeout,
+                                        distributedConnectionTimeout, metricNameStrategy, maxRecoveryThreads);
 
   }
 
@@ -448,15 +463,51 @@ public class SolrXmlConfig {
 
   private static PluginInfo[] getMetricReporterPluginInfos(Config config) {
     NodeList nodes = (NodeList) config.evaluate("solr/metrics/reporter", XPathConstants.NODESET);
-    if (nodes == null || nodes.getLength() == 0)
-      return new PluginInfo[0];
-    PluginInfo[] configs = new PluginInfo[nodes.getLength()];
+    List<PluginInfo> configs = new ArrayList<>();
+    boolean hasJmxReporter = false;
+    if (nodes != null && nodes.getLength() > 0) {
+      for (int i = 0; i < nodes.getLength(); i++) {
+        // we don't require class in order to support predefined replica and node reporter classes
+        PluginInfo info = new PluginInfo(nodes.item(i), "SolrMetricReporter", true, false);
+        String clazz = info.className;
+        if (clazz != null && clazz.equals(SolrJmxReporter.class.getName())) {
+          hasJmxReporter = true;
+        }
+        configs.add(info);
+      }
+    }
+    // if there's an MBean server running but there was no JMX reporter then add a default one
+    MBeanServer mBeanServer = JmxUtil.findFirstMBeanServer();
+    if (mBeanServer != null && !hasJmxReporter) {
+      log.info("MBean server found: " + mBeanServer + ", but no JMX reporters were configured - adding default JMX reporter.");
+      Map<String,Object> attributes = new HashMap<>();
+      attributes.put("name", "default");
+      attributes.put("class", SolrJmxReporter.class.getName());
+      PluginInfo defaultPlugin = new PluginInfo("reporter", attributes);
+      configs.add(defaultPlugin);
+    }
+    return configs.toArray(new PluginInfo[configs.size()]);
+  }
+
+  private static Set<String> getHiddenSysProps(Config config) {
+    NodeList nodes = (NodeList) config.evaluate("solr/metrics/hiddenSysProps/str", XPathConstants.NODESET);
+    if (nodes == null || nodes.getLength() == 0) {
+      return NodeConfig.NodeConfigBuilder.DEFAULT_HIDDEN_SYS_PROPS;
+    }
+    Set<String> props = new HashSet<>();
     for (int i = 0; i < nodes.getLength(); i++) {
-      // we don't require class in order to support predefined replica and node reporter classes
-      configs[i] = new PluginInfo(nodes.item(i), "SolrMetricReporter", true, false);
+      String prop = DOMUtil.getText(nodes.item(i));
+      if (prop != null && !prop.trim().isEmpty()) {
+        props.add(prop.trim());
+      }
+    }
+    if (props.isEmpty()) {
+      return NodeConfig.NodeConfigBuilder.DEFAULT_HIDDEN_SYS_PROPS;
+    } else {
+      return props;
     }
-    return configs;
   }
+
   private static PluginInfo getTransientCoreCacheFactoryPluginInfo(Config config) {
     Node node = config.getNode("solr/transientCoreCacheFactory", false);
     return (node == null) ? null : new PluginInfo(node, "transientCoreCacheFactory", false, true);

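The default-JMX-reporter logic above keys off whether an MBean server is already running. A small
probe using the same utility; the printed messages are illustrative:

    import javax.management.MBeanServer;

    import org.apache.solr.util.JmxUtil;

    public class JmxProbe {
      public static void main(String[] args) {
        // Mirrors the check in getMetricReporterPluginInfos(): when a server is found
        // and no SolrJmxReporter was configured, a default "reporter" plugin is added.
        MBeanServer server = JmxUtil.findFirstMBeanServer();
        System.out.println(server != null
            ? "MBean server found: " + server
            : "no MBean server running");
      }
    }
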
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/core/ZkContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/ZkContainer.java b/solr/core/src/java/org/apache/solr/core/ZkContainer.java
index 6665c4e..16ba4d8 100644
--- a/solr/core/src/java/org/apache/solr/core/ZkContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/ZkContainer.java
@@ -115,7 +115,7 @@ public class ZkContainer {
               @Override
               public List<CoreDescriptor> getCurrentDescriptors() {
                 List<CoreDescriptor> descriptors = new ArrayList<>(
-                    cc.getCoreNames().size());
+                    cc.getLoadedCoreNames().size());
                 Collection<SolrCore> cores = cc.getCores();
                 for (SolrCore core : cores) {
                   descriptors.add(core.getCoreDescriptor());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java b/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java
index 485f9c3..392930f 100644
--- a/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java
+++ b/solr/core/src/java/org/apache/solr/handler/AnalyzeEvaluator.java
@@ -14,9 +14,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/**
- *
- */
 package org.apache.solr.handler;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/CdcrBufferStateManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrBufferStateManager.java b/solr/core/src/java/org/apache/solr/handler/CdcrBufferStateManager.java
index 713d6fc..fd8d4bb 100644
--- a/solr/core/src/java/org/apache/solr/handler/CdcrBufferStateManager.java
+++ b/solr/core/src/java/org/apache/solr/handler/CdcrBufferStateManager.java
@@ -62,7 +62,7 @@ class CdcrBufferStateManager extends CdcrStateManager {
 
     // Startup and register the watcher at startup
     try {
-      SolrZkClient zkClient = core.getCoreDescriptor().getCoreContainer().getZkController().getZkClient();
+      SolrZkClient zkClient = core.getCoreContainer().getZkController().getZkClient();
       watcher = this.initWatcher(zkClient);
       this.setState(CdcrParams.BufferState.get(zkClient.getData(this.getZnodePath(), watcher, null, true)));
     } catch (KeeperException | InterruptedException e) {
@@ -103,7 +103,7 @@ class CdcrBufferStateManager extends CdcrStateManager {
    * action.
    */
   void synchronize() {
-    SolrZkClient zkClient = core.getCoreDescriptor().getCoreContainer().getZkController().getZkClient();
+    SolrZkClient zkClient = core.getCoreContainer().getZkController().getZkClient();
     try {
       zkClient.setData(this.getZnodePath(), this.getState().getBytes(), true);
       // check if nobody changed it in the meantime, and set a new watcher
@@ -114,7 +114,7 @@ class CdcrBufferStateManager extends CdcrStateManager {
   }
 
   private void createStateNode() {
-    SolrZkClient zkClient = core.getCoreDescriptor().getCoreContainer().getZkController().getZkClient();
+    SolrZkClient zkClient = core.getCoreContainer().getZkController().getZkClient();
     try {
       if (!zkClient.exists(this.getZnodePath(), true)) {
         if (!zkClient.exists(this.getZnodeBase(), true)) {
@@ -158,7 +158,7 @@ class CdcrBufferStateManager extends CdcrStateManager {
       if (Event.EventType.None.equals(event.getType())) {
         return;
       }
-      SolrZkClient zkClient = core.getCoreDescriptor().getCoreContainer().getZkController().getZkClient();
+      SolrZkClient zkClient = core.getCoreContainer().getZkController().getZkClient();
       try {
         CdcrParams.BufferState state = CdcrParams.BufferState.get(zkClient.getData(CdcrBufferStateManager.this.getZnodePath(), watcher, null, true));
         log.info("Received new CDCR buffer state from watcher: {} @ {}:{}", state, collectionName, shard);

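This and the following CDCR hunks apply one mechanical refactor: the container is reached directly
from the core instead of via its descriptor. A sketch of the accessor shape, for illustration only:

    import org.apache.solr.core.CoreContainer;
    import org.apache.solr.core.SolrCore;

    public class ContainerAccess {
      // Before: core.getCoreDescriptor().getCoreContainer()
      // After:  core.getCoreContainer()
      static CoreContainer containerOf(SolrCore core) {
        return core.getCoreContainer();
      }
    }
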
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/CdcrLeaderStateManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrLeaderStateManager.java b/solr/core/src/java/org/apache/solr/handler/CdcrLeaderStateManager.java
index 7d8ddc4..1b4d8af 100644
--- a/solr/core/src/java/org/apache/solr/handler/CdcrLeaderStateManager.java
+++ b/solr/core/src/java/org/apache/solr/handler/CdcrLeaderStateManager.java
@@ -53,8 +53,8 @@ class CdcrLeaderStateManager extends CdcrStateManager {
 
     // Fetch leader state and register the watcher at startup
     try {
-      SolrZkClient zkClient = core.getCoreDescriptor().getCoreContainer().getZkController().getZkClient();
-      ClusterState clusterState = core.getCoreDescriptor().getCoreContainer().getZkController().getClusterState();
+      SolrZkClient zkClient = core.getCoreContainer().getZkController().getZkClient();
+      ClusterState clusterState = core.getCoreContainer().getZkController().getClusterState();
 
       watcher = this.initWatcher(zkClient);
       // if the node does not exist, it means that the leader was not yet registered. This can happen
@@ -89,7 +89,7 @@ class CdcrLeaderStateManager extends CdcrStateManager {
   }
 
   private void checkIfIAmLeader() throws KeeperException, InterruptedException {
-    SolrZkClient zkClient = core.getCoreDescriptor().getCoreContainer().getZkController().getZkClient();
+    SolrZkClient zkClient = core.getCoreContainer().getZkController().getZkClient();
     ZkNodeProps props = ZkNodeProps.load(zkClient.getData(CdcrLeaderStateManager.this.getZnodePath(), null, null, true));
     if (props != null) {
       CdcrLeaderStateManager.this.setAmILeader(props.get("core").equals(core.getName()));
@@ -144,8 +144,8 @@ class CdcrLeaderStateManager extends CdcrStateManager {
 
       try {
         log.info("Received new leader state @ {}:{}", collectionName, shard);
-        SolrZkClient zkClient = core.getCoreDescriptor().getCoreContainer().getZkController().getZkClient();
-        ClusterState clusterState = core.getCoreDescriptor().getCoreContainer().getZkController().getClusterState();
+        SolrZkClient zkClient = core.getCoreContainer().getZkController().getZkClient();
+        ClusterState clusterState = core.getCoreContainer().getZkController().getClusterState();
         if (CdcrLeaderStateManager.this.isLeaderRegistered(zkClient, clusterState)) {
           CdcrLeaderStateManager.this.checkIfIAmLeader();
         }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/CdcrProcessStateManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrProcessStateManager.java b/solr/core/src/java/org/apache/solr/handler/CdcrProcessStateManager.java
index b1c8dda..05be077 100644
--- a/solr/core/src/java/org/apache/solr/handler/CdcrProcessStateManager.java
+++ b/solr/core/src/java/org/apache/solr/handler/CdcrProcessStateManager.java
@@ -62,7 +62,7 @@ class CdcrProcessStateManager extends CdcrStateManager {
 
     // Register the watcher at startup
     try {
-      SolrZkClient zkClient = core.getCoreDescriptor().getCoreContainer().getZkController().getZkClient();
+      SolrZkClient zkClient = core.getCoreContainer().getZkController().getZkClient();
       watcher = this.initWatcher(zkClient);
       this.setState(CdcrParams.ProcessState.get(zkClient.getData(this.getZnodePath(), watcher, null, true)));
     } catch (KeeperException | InterruptedException e) {
@@ -103,7 +103,7 @@ class CdcrProcessStateManager extends CdcrStateManager {
    * action.
    */
   void synchronize() {
-    SolrZkClient zkClient = core.getCoreDescriptor().getCoreContainer().getZkController().getZkClient();
+    SolrZkClient zkClient = core.getCoreContainer().getZkController().getZkClient();
     try {
       zkClient.setData(this.getZnodePath(), this.getState().getBytes(), true);
       // check if nobody changed it in the meantime, and set a new watcher
@@ -114,7 +114,7 @@ class CdcrProcessStateManager extends CdcrStateManager {
   }
 
   private void createStateNode() {
-    SolrZkClient zkClient = core.getCoreDescriptor().getCoreContainer().getZkController().getZkClient();
+    SolrZkClient zkClient = core.getCoreContainer().getZkController().getZkClient();
     try {
       if (!zkClient.exists(this.getZnodePath(), true)) {
         if (!zkClient.exists(this.getZnodeBase(), true)) { // Should be a no-op if the node exists
@@ -158,7 +158,7 @@ class CdcrProcessStateManager extends CdcrStateManager {
       if (Event.EventType.None.equals(event.getType())) {
         return;
       }
-      SolrZkClient zkClient = core.getCoreDescriptor().getCoreContainer().getZkController().getZkClient();
+      SolrZkClient zkClient = core.getCoreContainer().getZkController().getZkClient();
       try {
         CdcrParams.ProcessState state = CdcrParams.ProcessState.get(zkClient.getData(CdcrProcessStateManager.this.getZnodePath(), watcher, null, true));
         log.info("Received new CDCR process state from watcher: {} @ {}:{}", state, collectionName, shard);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorManager.java b/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorManager.java
index 528e0b7..6f0d704 100644
--- a/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorManager.java
+++ b/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorManager.java
@@ -236,7 +236,7 @@ class CdcrReplicatorManager implements CdcrStateManager.CdcrStateObserver {
       this.ulog = (CdcrUpdateLog) core.getUpdateHandler().getUpdateLog();
       this.state = state;
       this.targetCollection = state.getTargetCollection();
-      String baseUrl = core.getCoreDescriptor().getCoreContainer().getZkController().getBaseUrl();
+      String baseUrl = core.getCoreContainer().getZkController().getBaseUrl();
       this.myCoreUrl = ZkCoreNodeProps.getCoreUrl(baseUrl, core.getName());
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java
index ba174f9..44c8646 100644
--- a/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java
@@ -244,7 +244,7 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw
     collection = core.getCoreDescriptor().getCloudDescriptor().getCollectionName();
 
     // Make sure that the core is ZKAware
-    if (!core.getCoreDescriptor().getCoreContainer().isZooKeeperAware()) {
+    if (!core.getCoreContainer().isZooKeeperAware()) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
           "Solr instance is not running in SolrCloud mode.");
     }
@@ -390,7 +390,7 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw
    */
   private void handleCollectionCheckpointAction(SolrQueryRequest req, SolrQueryResponse rsp)
       throws IOException, SolrServerException {
-    ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
+    ZkController zkController = core.getCoreContainer().getZkController();
     try {
       zkController.getZkStateReader().forceUpdateCollection(collection);
     } catch (Exception e) {
@@ -638,7 +638,7 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw
           running.set(true);
           String masterUrl = req.getParams().get(ReplicationHandler.MASTER_URL);
           bootstrapCallable = new BootstrapCallable(masterUrl, core);
-          bootstrapFuture = core.getCoreDescriptor().getCoreContainer().getUpdateShardHandler().getRecoveryExecutor().submit(bootstrapCallable);
+          bootstrapFuture = core.getCoreContainer().getUpdateShardHandler().getRecoveryExecutor().submit(bootstrapCallable);
           try {
             bootstrapFuture.get();
           } catch (InterruptedException e) {
@@ -659,7 +659,7 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw
     };
 
     try {
-      core.getCoreDescriptor().getCoreContainer().getUpdateShardHandler().getUpdateExecutor().submit(runnable);
+      core.getCoreContainer().getUpdateShardHandler().getUpdateExecutor().submit(runnable);
       rsp.add(RESPONSE_STATUS, "submitted");
     } catch (RejectedExecutionException ree)  {
       // no problem, we're probably shutting down

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/CdcrUpdateLogSynchronizer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrUpdateLogSynchronizer.java b/solr/core/src/java/org/apache/solr/handler/CdcrUpdateLogSynchronizer.java
index 48bfec0..7ce060c 100644
--- a/solr/core/src/java/org/apache/solr/handler/CdcrUpdateLogSynchronizer.java
+++ b/solr/core/src/java/org/apache/solr/handler/CdcrUpdateLogSynchronizer.java
@@ -112,7 +112,7 @@ class CdcrUpdateLogSynchronizer implements CdcrStateManager.CdcrStateObserver {
   private class UpdateLogSynchronisation implements Runnable {
 
     private String getLeaderUrl() {
-      ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
+      ZkController zkController = core.getCoreContainer().getZkController();
       ClusterState cstate = zkController.getClusterState();
       DocCollection docCollection = cstate.getCollection(collection);
       ZkNodeProps leaderProps = docCollection.getLeader(shardId);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
index 6d41d83..c843e0a 100644
--- a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
@@ -84,12 +84,12 @@ public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, P
 
     String defaultCollection;
     String defaultZkhost;
-    CoreContainer coreContainer = core.getCoreDescriptor().getCoreContainer();
+    CoreContainer coreContainer = core.getCoreContainer();
     this.coreName = core.getName();
 
     if(coreContainer.isZooKeeperAware()) {
       defaultCollection = core.getCoreDescriptor().getCollectionName();
-      defaultZkhost = core.getCoreDescriptor().getCoreContainer().getZkController().getZkServerAddress();
+      defaultZkhost = core.getCoreContainer().getZkController().getZkServerAddress();
       streamFactory.withCollectionZkHost(defaultCollection, defaultZkhost);
       streamFactory.withDefaultZkHost(defaultZkhost);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
index d79effd..96e505a 100644
--- a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
+++ b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
@@ -215,7 +215,7 @@ public class IndexFetcher {
     httpClientParams.set(HttpClientUtil.PROP_BASIC_AUTH_PASS, httpBasicAuthPassword);
     httpClientParams.set(HttpClientUtil.PROP_ALLOW_COMPRESSION, useCompression);
 
-    return HttpClientUtil.createClient(httpClientParams, core.getCoreDescriptor().getCoreContainer().getUpdateShardHandler().getConnectionManager(), true);
+    return HttpClientUtil.createClient(httpClientParams, core.getCoreContainer().getUpdateShardHandler().getConnectionManager(), true);
   }
 
   public IndexFetcher(final NamedList initArgs, final ReplicationHandler handler, final SolrCore sc) {
@@ -482,7 +482,7 @@ public class IndexFetcher {
           // because of soft commits (which open a searcher on IW's data)
           // so we need to close the existing searcher on the last commit
           // and wait until we are able to clean up all unused lucene files
-          if (solrCore.getCoreDescriptor().getCoreContainer().isZooKeeperAware()) {
+          if (solrCore.getCoreContainer().isZooKeeperAware()) {
             solrCore.closeSearcher();
           }
 
@@ -639,7 +639,7 @@ public class IndexFetcher {
   }
 
   private Replica getLeaderReplica() throws InterruptedException {
-    ZkController zkController = solrCore.getCoreDescriptor().getCoreContainer().getZkController();
+    ZkController zkController = solrCore.getCoreContainer().getZkController();
     CloudDescriptor cd = solrCore.getCoreDescriptor().getCloudDescriptor();
     Replica leaderReplica = zkController.getZkStateReader().getLeaderRetry(
         cd.getCollectionName(), cd.getShardId());
@@ -658,7 +658,7 @@ public class IndexFetcher {
         }
       }
 
-      if (core.getCoreDescriptor().getCoreContainer().isZooKeeperAware()) {
+      if (core.getCoreContainer().isZooKeeperAware()) {
         // we only track replication success in SolrCloud mode
         core.getUpdateHandler().getSolrCoreState().setLastReplicateIndexSuccess(successfulInstall);
       }
@@ -846,7 +846,7 @@ public class IndexFetcher {
     IndexCommit commitPoint;
     // must get the latest solrCore object because the one we have might be closed because of a reload
     // todo stop keeping solrCore around
-    SolrCore core = solrCore.getCoreDescriptor().getCoreContainer().getCore(solrCore.getName());
+    SolrCore core = solrCore.getCoreContainer().getCore(solrCore.getName());
     try {
       Future[] waitSearcher = new Future[1];
       searcher = core.getSearcher(true, true, waitSearcher, true);
@@ -874,7 +874,7 @@ public class IndexFetcher {
     final CountDownLatch latch = new CountDownLatch(1);
     new Thread(() -> {
       try {
-        solrCore.getCoreDescriptor().getCoreContainer().reload(solrCore.getName());
+        solrCore.getCoreContainer().reload(solrCore.getName());
       } catch (Exception e) {
         LOG.error("Could not reload core ", e);
       } finally {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java b/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java
index 9c86350..50ea711 100644
--- a/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java
@@ -19,8 +19,6 @@ package org.apache.solr.handler;
 import java.io.IOException;
 import java.io.Reader;
 import java.lang.invoke.MethodHandles;
-import java.net.MalformedURLException;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Iterator;
@@ -481,12 +479,4 @@ public class MoreLikeThisHandler extends RequestHandlerBase
   public String getDescription() {
     return "Solr MoreLikeThis";
   }
-
-  @Override
-  public URL[] getDocs() {
-    try {
-      return new URL[] { new URL("http://wiki.apache.org/solr/MoreLikeThis") };
-    }
-    catch( MalformedURLException ex ) { return null; }
-  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/RealTimeGetHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/RealTimeGetHandler.java b/solr/core/src/java/org/apache/solr/handler/RealTimeGetHandler.java
index 9049318..bce374f 100644
--- a/solr/core/src/java/org/apache/solr/handler/RealTimeGetHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/RealTimeGetHandler.java
@@ -20,7 +20,6 @@ import org.apache.solr.api.Api;
 import org.apache.solr.api.ApiBag;
 import org.apache.solr.handler.component.*;
 
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
@@ -43,11 +42,6 @@ public class RealTimeGetHandler extends SearchHandler {
   }
 
   @Override
-  public URL[] getDocs() {
-    return null;
-  }
-
-  @Override
   public Collection<Api> getApis() {
     return ApiBag.wrapRequestHandlers(this, "core.RealtimeGet");
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index 98bf11a..94ff189 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -90,6 +90,8 @@ import org.apache.solr.core.SolrEventListener;
 import org.apache.solr.core.backup.repository.BackupRepository;
 import org.apache.solr.core.backup.repository.LocalFileSystemRepository;
 import org.apache.solr.core.snapshots.SolrSnapshotMetaDataManager;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.handler.IndexFetcher.IndexFetchResult;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
@@ -162,6 +164,10 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
       }
       return new CommitVersionInfo(generation, version);
     }
+
+    public String toString() {
+      return "generation=" + generation + ",version=" + version;
+    }
   }
 
   private IndexFetcher pollingIndexFetcher;
@@ -437,7 +443,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
     String location = params.get(CoreAdminParams.BACKUP_LOCATION);
 
     String repoName = params.get(CoreAdminParams.BACKUP_REPOSITORY);
-    CoreContainer cc = core.getCoreDescriptor().getCoreContainer();
+    CoreContainer cc = core.getCoreContainer();
     BackupRepository repo = null;
     if (repoName != null) {
       repo = cc.newBackupRepository(Optional.of(repoName));
@@ -555,7 +561,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
 
       String location = params.get(CoreAdminParams.BACKUP_LOCATION);
       String repoName = params.get(CoreAdminParams.BACKUP_REPOSITORY);
-      CoreContainer cc = core.getCoreDescriptor().getCoreContainer();
+      CoreContainer cc = core.getCoreContainer();
       BackupRepository repo = null;
       if (repoName != null) {
         repo = cc.newBackupRepository(Optional.of(repoName));
@@ -693,7 +699,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
       }
     }
 
-    if (confFileNameAlias.size() < 1 || core.getCoreDescriptor().getCoreContainer().isZooKeeperAware())
+    if (confFileNameAlias.size() < 1 || core.getCoreContainer().isZooKeeperAware())
       return;
     LOG.debug("Adding config files to list: " + includeConfFiles);
     //if configuration files need to be included get their details
@@ -851,52 +857,56 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
   }
 
   @Override
-  @SuppressWarnings("unchecked")
-  public NamedList getStatistics() {
-    NamedList list = super.getStatistics();
-    if (core != null) {
-      list.add("indexSize", NumberUtils.readableSize(core.getIndexSize()));
-      CommitVersionInfo vInfo = (core != null && !core.isClosed()) ? getIndexVersion(): null;
-      list.add("indexVersion", null == vInfo ? 0 : vInfo.version);
-      list.add(GENERATION, null == vInfo ? 0 : vInfo.generation);
-
-      list.add("indexPath", core.getIndexDir());
-      list.add("isMaster", String.valueOf(isMaster));
-      list.add("isSlave", String.valueOf(isSlave));
-
+  public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
+    super.initializeMetrics(manager, registry, scope);
+
+    manager.registerGauge(this, registry, () -> core != null ? NumberUtils.readableSize(core.getIndexSize()) : "", true,
+        "indexSize", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> (core != null && !core.isClosed() ? getIndexVersion().toString() : ""), true,
+        "indexVersion", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> (core != null && !core.isClosed() ? getIndexVersion().generation : 0), true,
+        GENERATION, getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> core != null ? core.getIndexDir() : "", true,
+        "indexPath", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> isMaster, true,
+        "isMaster", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> isSlave, true,
+        "isSlave", getCategory().toString(), scope);
+    final MetricsMap fetcherMap = new MetricsMap((detailed, map) -> {
       IndexFetcher fetcher = currentIndexFetcher;
       if (fetcher != null) {
-        list.add(MASTER_URL, fetcher.getMasterUrl());
+        map.put(MASTER_URL, fetcher.getMasterUrl());
         if (getPollInterval() != null) {
-          list.add(POLL_INTERVAL, getPollInterval());
+          map.put(POLL_INTERVAL, getPollInterval());
         }
-        list.add("isPollingDisabled", String.valueOf(isPollingDisabled()));
-        list.add("isReplicating", String.valueOf(isReplicating()));
+        map.put("isPollingDisabled", isPollingDisabled());
+        map.put("isReplicating", isReplicating());
         long elapsed = fetcher.getReplicationTimeElapsed();
         long val = fetcher.getTotalBytesDownloaded();
         if (elapsed > 0) {
-          list.add("timeElapsed", elapsed);
-          list.add("bytesDownloaded", val);
-          list.add("downloadSpeed", val / elapsed);
+          map.put("timeElapsed", elapsed);
+          map.put("bytesDownloaded", val);
+          map.put("downloadSpeed", val / elapsed);
         }
         Properties props = loadReplicationProperties();
-        addVal(list, IndexFetcher.PREVIOUS_CYCLE_TIME_TAKEN, props, Long.class);
-        addVal(list, IndexFetcher.INDEX_REPLICATED_AT, props, Date.class);
-        addVal(list, IndexFetcher.CONF_FILES_REPLICATED_AT, props, Date.class);
-        addVal(list, IndexFetcher.REPLICATION_FAILED_AT, props, Date.class);
-        addVal(list, IndexFetcher.TIMES_FAILED, props, Integer.class);
-        addVal(list, IndexFetcher.TIMES_INDEX_REPLICATED, props, Integer.class);
-        addVal(list, IndexFetcher.LAST_CYCLE_BYTES_DOWNLOADED, props, Long.class);
-        addVal(list, IndexFetcher.TIMES_CONFIG_REPLICATED, props, Integer.class);
-        addVal(list, IndexFetcher.CONF_FILES_REPLICATED, props, String.class);
+        addVal(map, IndexFetcher.PREVIOUS_CYCLE_TIME_TAKEN, props, Long.class);
+        addVal(map, IndexFetcher.INDEX_REPLICATED_AT, props, Date.class);
+        addVal(map, IndexFetcher.CONF_FILES_REPLICATED_AT, props, Date.class);
+        addVal(map, IndexFetcher.REPLICATION_FAILED_AT, props, Date.class);
+        addVal(map, IndexFetcher.TIMES_FAILED, props, Integer.class);
+        addVal(map, IndexFetcher.TIMES_INDEX_REPLICATED, props, Integer.class);
+        addVal(map, IndexFetcher.LAST_CYCLE_BYTES_DOWNLOADED, props, Long.class);
+        addVal(map, IndexFetcher.TIMES_CONFIG_REPLICATED, props, Integer.class);
+        addVal(map, IndexFetcher.CONF_FILES_REPLICATED, props, String.class);
       }
-      if (isMaster) {
-        if (includeConfFiles != null) list.add("confFilesToReplicate", includeConfFiles);
-        list.add(REPLICATE_AFTER, getReplicateAfterStrings());
-        list.add("replicationEnabled", String.valueOf(replicationEnabled.get()));
-      }
-    }
-    return list;
+    });
+    manager.registerGauge(this, registry, fetcherMap, true, "fetcher", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> isMaster && includeConfFiles != null ? includeConfFiles : "", true,
+        "confFilesToReplicate", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> isMaster ? getReplicateAfterStrings() : Collections.<String>emptyList(), true,
+        REPLICATE_AFTER, getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> isMaster && replicationEnabled.get(), true,
+        "replicationEnabled", getCategory().toString(), scope);
   }
 
   /**
@@ -1064,24 +1074,39 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
   }
 
   private void addVal(NamedList<Object> nl, String key, Properties props, Class clzz) {
+    Object val = formatVal(key, props, clzz);
+    if (val != null) {
+      nl.add(key, val);
+    }
+  }
+
+  private void addVal(Map<String, Object> map, String key, Properties props, Class clzz) {
+    Object val = formatVal(key, props, clzz);
+    if (val != null) {
+      map.put(key, val);
+    }
+  }
+
+  private Object formatVal(String key, Properties props, Class clzz) {
     String s = props.getProperty(key);
-    if (s == null || s.trim().length() == 0) return;
+    if (s == null || s.trim().length() == 0) return null;
     if (clzz == Date.class) {
       try {
         Long l = Long.parseLong(s);
-        nl.add(key, new Date(l).toString());
-      } catch (NumberFormatException e) {/*no op*/ }
+        return new Date(l).toString();
+      } catch (NumberFormatException e) {
+        return null;
+      }
     } else if (clzz == List.class) {
       String ss[] = s.split(",");
       List<String> l = new ArrayList<>();
       for (String s1 : ss) {
         l.add(new Date(Long.parseLong(s1)).toString());
       }
-      nl.add(key, l);
+      return l;
     } else {
-      nl.add(key, s);
+      return s;
     }
-
   }
 
   private List<String> getReplicateAfterStrings() {
@@ -1189,7 +1214,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
     boolean enableMaster = isEnabled( master );
 
     if (enableMaster || enableSlave) {
-      if (core.getCoreDescriptor().getCoreContainer().getZkController() != null) {
+      if (core.getCoreContainer().getZkController() != null) {
         LOG.warn("SolrCloud is enabled for core " + core.getName() + " but so is old-style replication. Make sure you" +
             " intend this behavior, it usually indicates a mis-configuration. Master setting is " +
             Boolean.toString(enableMaster) + " and slave setting is " + Boolean.toString(enableSlave));

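The two hunks above replace ReplicationHandler's old NamedList-building getStatistics() with gauges that are registered once in initializeMetrics() and computed lazily on every registry read; the fetcher details are grouped under a single MetricsMap. Reduced to its core, the pattern looks like this (a sketch reusing the manager/registry/scope variables of the method above; the "pollingState" gauge name is made up for illustration):

    MetricsMap pollingState = new MetricsMap((detailed, map) -> {
      // the lambda runs on each registry read, so the reported values are always current
      map.put("isPollingDisabled", isPollingDisabled());
      map.put("isReplicating", isReplicating());
    });
    manager.registerGauge(this, registry, pollingState, true, "pollingState",
        getCategory().toString(), scope);
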
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
index 1958e11..4219768 100644
--- a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
+++ b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
@@ -17,9 +17,11 @@
 package org.apache.solr.handler;
 
 import java.lang.invoke.MethodHandles;
-import java.net.URL;
 import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
 
+import com.codahale.metrics.MetricRegistry;
 import com.google.common.collect.ImmutableList;
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Meter;
@@ -27,11 +29,10 @@ import com.codahale.metrics.Timer;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.common.util.SuppressForbidden;
 import org.apache.solr.core.PluginBag;
 import org.apache.solr.core.PluginInfo;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
 import org.apache.solr.request.SolrQueryRequest;
@@ -42,7 +43,6 @@ import org.apache.solr.util.SolrPluginUtils;
 import org.apache.solr.api.Api;
 import org.apache.solr.api.ApiBag;
 import org.apache.solr.api.ApiSupport;
-import org.apache.solr.util.stats.MetricUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -51,7 +51,7 @@ import static org.apache.solr.core.RequestParams.USEPARAM;
 /**
  *
  */
-public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfoMBean, SolrMetricProducer, NestedRequestHandler,ApiSupport {
+public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfoBean, SolrMetricProducer, NestedRequestHandler,ApiSupport {
 
   protected NamedList initArgs = null;
   protected SolrParams defaults;
@@ -74,6 +74,9 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
 
   private PluginInfo pluginInfo;
 
+  private Set<String> metricNames = new HashSet<>();
+  private MetricRegistry registry;
+
   @SuppressForbidden(reason = "Need currentTimeMillis, used only for stats output")
   public RequestHandlerBase() {
     handlerStart = System.currentTimeMillis();
@@ -138,13 +141,15 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
 
   @Override
   public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
-    numErrors = manager.meter(registryName, "errors", getCategory().toString(), scope);
-    numServerErrors = manager.meter(registryName, "serverErrors", getCategory().toString(), scope);
-    numClientErrors = manager.meter(registryName, "clientErrors", getCategory().toString(), scope);
-    numTimeouts = manager.meter(registryName, "timeouts", getCategory().toString(), scope);
-    requests = manager.counter(registryName, "requests", getCategory().toString(), scope);
-    requestTimes = manager.timer(registryName, "requestTimes", getCategory().toString(), scope);
-    totalTime = manager.counter(registryName, "totalTime", getCategory().toString(), scope);
+    registry = manager.registry(registryName);
+    numErrors = manager.meter(this, registryName, "errors", getCategory().toString(), scope);
+    numServerErrors = manager.meter(this, registryName, "serverErrors", getCategory().toString(), scope);
+    numClientErrors = manager.meter(this, registryName, "clientErrors", getCategory().toString(), scope);
+    numTimeouts = manager.meter(this, registryName, "timeouts", getCategory().toString(), scope);
+    requests = manager.counter(this, registryName, "requests", getCategory().toString(), scope);
+    requestTimes = manager.timer(this, registryName, "requestTimes", getCategory().toString(), scope);
+    totalTime = manager.counter(this, registryName, "totalTime", getCategory().toString(), scope);
+    manager.registerGauge(this, registryName, () -> handlerStart, true, "handlerStart", getCategory().toString(), scope);
   }
 
   public static SolrParams getSolrParamsFromNamedList(NamedList args, String key) {
@@ -225,24 +230,21 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
 
   @Override
   public abstract String getDescription();
-  @Override
-  public String getSource() { return null; }
-  
-  @Override
-  public String getVersion() {
-    return getClass().getPackage().getSpecificationVersion();
-  }
-  
+
   @Override
   public Category getCategory() {
     return Category.QUERY;
   }
 
   @Override
-  public URL[] getDocs() {
-    return null;  // this can be overridden, but not required
+  public Set<String> getMetricNames() {
+    return metricNames;
   }
 
+  @Override
+  public MetricRegistry getMetricRegistry() {
+    return registry;
+  }
 
   @Override
   public SolrRequestHandler getSubHandler(String subPath) {
@@ -285,22 +287,6 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
     return  pluginInfo;
   }
 
-
-  @Override
-  public NamedList<Object> getStatistics() {
-    NamedList<Object> lst = new SimpleOrderedMap<>();
-    lst.add("handlerStart",handlerStart);
-    lst.add("requests", requests.getCount());
-    lst.add("errors", numErrors.getCount());
-    lst.add("serverErrors", numServerErrors.getCount());
-    lst.add("clientErrors", numClientErrors.getCount());
-    lst.add("timeouts", numTimeouts.getCount());
-    // convert totalTime to ms
-    lst.add("totalTime", MetricUtils.nsToMs(totalTime.getCount()));
-    MetricUtils.addMetrics(lst, requestTimes);
-    return lst;
-  }
-
   @Override
   public Collection<Api> getApis() {
     return ImmutableList.of(new ApiBag.ReqHandlerToApi(this, ApiBag.constructSpec(pluginInfo)));

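With getStatistics() gone, a handler's counters exist only in the shared MetricRegistry: subclasses get errors/serverErrors/clientErrors/timeouts/requests/requestTimes/totalTime/handlerStart for free by calling super.initializeMetrics(), and can register extra gauges through the same manager. A minimal sketch, assuming a hypothetical handler and gauge name (the SolrMetricManager calls are the ones shown in the diff):

    import org.apache.solr.metrics.SolrMetricManager;
    import org.apache.solr.request.SolrQueryRequest;
    import org.apache.solr.response.SolrQueryResponse;

    public class ExamplePingHandler extends RequestHandlerBase {
      private volatile long lastPing;

      @Override
      public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
        super.initializeMetrics(manager, registryName, scope); // inherited meters/counters/timers
        manager.registerGauge(this, registryName, () -> lastPing, true,
            "lastPing", getCategory().toString(), scope);      // extra per-handler gauge
      }

      @Override
      public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
        lastPing = System.currentTimeMillis();
        rsp.add("status", "OK");
      }

      @Override
      public String getDescription() { return "example handler exposing a custom gauge"; }
    }
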
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
index f307baa..c80d0d7 100644
--- a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
@@ -60,10 +60,10 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, Per
   private boolean isCloud = false;
 
   public void inform(SolrCore core) {
-    CoreContainer coreContainer = core.getCoreDescriptor().getCoreContainer();
+    CoreContainer coreContainer = core.getCoreContainer();
 
     if(coreContainer.isZooKeeperAware()) {
-      defaultZkhost = core.getCoreDescriptor().getCoreContainer().getZkController().getZkServerAddress();
+      defaultZkhost = core.getCoreContainer().getZkController().getZkServerAddress();
       defaultWorkerCollection = core.getCoreDescriptor().getCollectionName();
       isCloud = true;
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
index 2660cba..08ce838 100644
--- a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
@@ -436,7 +436,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
 
           log.debug("persisted to version : {} ", latestVersion);
           waitForAllReplicasState(req.getCore().getCoreDescriptor().getCloudDescriptor().getCollectionName(),
-              req.getCore().getCoreDescriptor().getCoreContainer().getZkController(), RequestParams.NAME, latestVersion, 30);
+              req.getCore().getCoreContainer().getZkController(), RequestParams.NAME, latestVersion, 30);
         }
 
       } else {
@@ -495,12 +495,12 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
             ConfigOverlay.RESOURCE_NAME, overlay.toByteArray(), true);
         log.info("Executed config commands successfully and persisted to ZK {}", ops);
         waitForAllReplicasState(req.getCore().getCoreDescriptor().getCloudDescriptor().getCollectionName(),
-            req.getCore().getCoreDescriptor().getCoreContainer().getZkController(),
+            req.getCore().getCoreContainer().getZkController(),
             ConfigOverlay.NAME,
             latestVersion, 30);
       } else {
         SolrResourceLoader.persistConfLocally(loader, ConfigOverlay.RESOURCE_NAME, overlay.toByteArray());
-        req.getCore().getCoreDescriptor().getCoreContainer().reload(req.getCore().getName());
+        req.getCore().getCoreContainer().reload(req.getCore().getName());
         log.info("Executed config commands successfully and persited to File System {}", ops);
       }
 
@@ -702,12 +702,6 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
     return "Edit solrconfig.xml";
   }
 
-
-  @Override
-  public String getVersion() {
-    return getClass().getPackage().getSpecificationVersion();
-  }
-
   @Override
   public Category getCategory() {
     return Category.ADMIN;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/StandardRequestHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StandardRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/StandardRequestHandler.java
index d5eae08..f167b1d 100644
--- a/solr/core/src/java/org/apache/solr/handler/StandardRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StandardRequestHandler.java
@@ -18,9 +18,6 @@ package org.apache.solr.handler;
 
 import org.apache.solr.handler.component.*;
 
-import java.net.MalformedURLException;
-import java.net.URL;
-
 /**
  *
  *
@@ -47,14 +44,6 @@ public class StandardRequestHandler extends SearchHandler
   public String getDescription() {
     return "The standard Solr request handler";
   }
-
-  @Override
-  public URL[] getDocs() {
-    try {
-      return new URL[] { new URL("http://wiki.apache.org/solr/StandardRequestHandler") };
-    }
-    catch( MalformedURLException ex ) { return null; }
-  }
 }
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index d42ded4..515a90b 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -33,84 +33,14 @@ import org.apache.solr.client.solrj.io.ModelCache;
 import org.apache.solr.client.solrj.io.SolrClientCache;
 import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.comp.StreamComparator;
-import org.apache.solr.client.solrj.io.eval.AbsoluteValueEvaluator;
-import org.apache.solr.client.solrj.io.eval.AddEvaluator;
-import org.apache.solr.client.solrj.io.eval.AndEvaluator;
-import org.apache.solr.client.solrj.io.eval.ArcCosineEvaluator;
-import org.apache.solr.client.solrj.io.eval.ArcSineEvaluator;
-import org.apache.solr.client.solrj.io.eval.ArcTangentEvaluator;
-import org.apache.solr.client.solrj.io.eval.CeilingEvaluator;
-import org.apache.solr.client.solrj.io.eval.CoalesceEvaluator;
-import org.apache.solr.client.solrj.io.eval.CosineEvaluator;
-import org.apache.solr.client.solrj.io.eval.CubedRootEvaluator;
-import org.apache.solr.client.solrj.io.eval.DivideEvaluator;
-import org.apache.solr.client.solrj.io.eval.EqualsEvaluator;
-import org.apache.solr.client.solrj.io.eval.ExclusiveOrEvaluator;
-import org.apache.solr.client.solrj.io.eval.FloorEvaluator;
-import org.apache.solr.client.solrj.io.eval.GreaterThanEqualToEvaluator;
-import org.apache.solr.client.solrj.io.eval.GreaterThanEvaluator;
-import org.apache.solr.client.solrj.io.eval.HyperbolicCosineEvaluator;
-import org.apache.solr.client.solrj.io.eval.HyperbolicSineEvaluator;
-import org.apache.solr.client.solrj.io.eval.HyperbolicTangentEvaluator;
-import org.apache.solr.client.solrj.io.eval.IfThenElseEvaluator;
-import org.apache.solr.client.solrj.io.eval.LessThanEqualToEvaluator;
-import org.apache.solr.client.solrj.io.eval.LessThanEvaluator;
-import org.apache.solr.client.solrj.io.eval.ModuloEvaluator;
-import org.apache.solr.client.solrj.io.eval.MultiplyEvaluator;
-import org.apache.solr.client.solrj.io.eval.NaturalLogEvaluator;
-import org.apache.solr.client.solrj.io.eval.NotEvaluator;
-import org.apache.solr.client.solrj.io.eval.OrEvaluator;
-import org.apache.solr.client.solrj.io.eval.PowerEvaluator;
-import org.apache.solr.client.solrj.io.eval.RawValueEvaluator;
-import org.apache.solr.client.solrj.io.eval.RoundEvaluator;
-import org.apache.solr.client.solrj.io.eval.SineEvaluator;
-import org.apache.solr.client.solrj.io.eval.SquareRootEvaluator;
-import org.apache.solr.client.solrj.io.eval.SubtractEvaluator;
-import org.apache.solr.client.solrj.io.eval.TangentEvaluator;
-import org.apache.solr.client.solrj.io.eval.UuidEvaluator;
+import org.apache.solr.client.solrj.io.eval.*;
 import org.apache.solr.client.solrj.io.graph.GatherNodesStream;
 import org.apache.solr.client.solrj.io.graph.ShortestPathStream;
 import org.apache.solr.client.solrj.io.ops.ConcatOperation;
 import org.apache.solr.client.solrj.io.ops.DistinctOperation;
 import org.apache.solr.client.solrj.io.ops.GroupOperation;
 import org.apache.solr.client.solrj.io.ops.ReplaceOperation;
-import org.apache.solr.client.solrj.io.stream.CartesianProductStream;
-import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
-import org.apache.solr.client.solrj.io.stream.CommitStream;
-import org.apache.solr.client.solrj.io.stream.ComplementStream;
-import org.apache.solr.client.solrj.io.stream.DaemonStream;
-import org.apache.solr.client.solrj.io.stream.ExceptionStream;
-import org.apache.solr.client.solrj.io.stream.ExecutorStream;
-import org.apache.solr.client.solrj.io.stream.FacetStream;
-import org.apache.solr.client.solrj.io.stream.FeaturesSelectionStream;
-import org.apache.solr.client.solrj.io.stream.FetchStream;
-import org.apache.solr.client.solrj.io.stream.HashJoinStream;
-import org.apache.solr.client.solrj.io.stream.HavingStream;
-import org.apache.solr.client.solrj.io.stream.InnerJoinStream;
-import org.apache.solr.client.solrj.io.stream.IntersectStream;
-import org.apache.solr.client.solrj.io.stream.JDBCStream;
-import org.apache.solr.client.solrj.io.stream.LeftOuterJoinStream;
-import org.apache.solr.client.solrj.io.stream.MergeStream;
-import org.apache.solr.client.solrj.io.stream.ModelStream;
-import org.apache.solr.client.solrj.io.stream.NullStream;
-import org.apache.solr.client.solrj.io.stream.OuterHashJoinStream;
-import org.apache.solr.client.solrj.io.stream.ParallelStream;
-import org.apache.solr.client.solrj.io.stream.PriorityStream;
-import org.apache.solr.client.solrj.io.stream.RandomStream;
-import org.apache.solr.client.solrj.io.stream.RankStream;
-import org.apache.solr.client.solrj.io.stream.ReducerStream;
-import org.apache.solr.client.solrj.io.stream.RollupStream;
-import org.apache.solr.client.solrj.io.stream.ScoreNodesStream;
-import org.apache.solr.client.solrj.io.stream.SelectStream;
-import org.apache.solr.client.solrj.io.stream.SignificantTermsStream;
-import org.apache.solr.client.solrj.io.stream.SortStream;
-import org.apache.solr.client.solrj.io.stream.StatsStream;
-import org.apache.solr.client.solrj.io.stream.StreamContext;
-import org.apache.solr.client.solrj.io.stream.TextLogitStream;
-import org.apache.solr.client.solrj.io.stream.TopicStream;
-import org.apache.solr.client.solrj.io.stream.TupleStream;
-import org.apache.solr.client.solrj.io.stream.UniqueStream;
-import org.apache.solr.client.solrj.io.stream.UpdateStream;
+import org.apache.solr.client.solrj.io.stream.*;
 import org.apache.solr.client.solrj.io.stream.expr.Explanation;
 import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
 import org.apache.solr.client.solrj.io.stream.expr.Expressible;
@@ -151,6 +81,10 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
     return PermissionNameProvider.Name.READ_PERM;
   }
 
+  public static SolrClientCache getClientCache() {
+    return clientCache;
+  }
+
   public void inform(SolrCore core) {
     
     /* The stream factory will always contain the zkUrl for the given collection
@@ -165,12 +99,12 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
 
     String defaultCollection;
     String defaultZkhost;
-    CoreContainer coreContainer = core.getCoreDescriptor().getCoreContainer();
+    CoreContainer coreContainer = core.getCoreContainer();
     this.coreName = core.getName();
 
     if(coreContainer.isZooKeeperAware()) {
       defaultCollection = core.getCoreDescriptor().getCollectionName();
-      defaultZkhost = core.getCoreDescriptor().getCoreContainer().getZkController().getZkServerAddress();
+      defaultZkhost = core.getCoreContainer().getZkController().getZkServerAddress();
       streamFactory.withCollectionZkHost(defaultCollection, defaultZkhost);
       streamFactory.withDefaultZkHost(defaultZkhost);
       modelCache = new ModelCache(250,
@@ -221,30 +155,34 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
       .withFunctionName("executor", ExecutorStream.class)
       .withFunctionName("null", NullStream.class)
       .withFunctionName("priority", PriorityStream.class)
       .withFunctionName("significantTerms", SignificantTermsStream.class)
       .withFunctionName("cartesianProduct", CartesianProductStream.class)
+      .withFunctionName("shuffle", ShuffleStream.class)
+      .withFunctionName("calc", CalculatorStream.class)
+      .withFunctionName("eval", EvalStream.class)
+      .withFunctionName("echo", EchoStream.class)
 
       // metrics
       .withFunctionName("min", MinMetric.class)
       .withFunctionName("max", MaxMetric.class)
       .withFunctionName("avg", MeanMetric.class)
       .withFunctionName("sum", SumMetric.class)
       .withFunctionName("count", CountMetric.class)
       
       // tuple manipulation operations
-      .withFunctionName("replace", ReplaceOperation.class)
+         .withFunctionName("replace", ReplaceOperation.class)
       .withFunctionName("concat", ConcatOperation.class)
       
       // stream reduction operations
-      .withFunctionName("group", GroupOperation.class)
+         .withFunctionName("group", GroupOperation.class)
       .withFunctionName("distinct", DistinctOperation.class)
       .withFunctionName("having", HavingStream.class)
       
       // Stream Evaluators
-      .withFunctionName("val", RawValueEvaluator.class)
+         .withFunctionName("val", RawValueEvaluator.class)
       
       // Boolean Stream Evaluators
-      .withFunctionName("and", AndEvaluator.class)
+         .withFunctionName("and", AndEvaluator.class)
       .withFunctionName("eor", ExclusiveOrEvaluator.class)
       .withFunctionName("eq", EqualsEvaluator.class)
       .withFunctionName("gt", GreaterThanEvaluator.class)
@@ -252,10 +190,23 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
       .withFunctionName("lt", LessThanEvaluator.class)
       .withFunctionName("lteq", LessThanEqualToEvaluator.class)
       .withFunctionName("not", NotEvaluator.class)
-      .withFunctionName("or", OrEvaluator.class)
-      
+         .withFunctionName("or", OrEvaluator.class)
+
+      // Date Time Evaluators
+         .withFunctionName(TemporalEvaluatorYear.FUNCTION_NAME, TemporalEvaluatorYear.class)
+      .withFunctionName(TemporalEvaluatorMonth.FUNCTION_NAME, TemporalEvaluatorMonth.class)
+      .withFunctionName(TemporalEvaluatorDay.FUNCTION_NAME, TemporalEvaluatorDay.class)
+      .withFunctionName(TemporalEvaluatorDayOfYear.FUNCTION_NAME, TemporalEvaluatorDayOfYear.class)
+         .withFunctionName(TemporalEvaluatorHour.FUNCTION_NAME, TemporalEvaluatorHour.class)
+      .withFunctionName(TemporalEvaluatorMinute.FUNCTION_NAME, TemporalEvaluatorMinute.class)
+         .withFunctionName(TemporalEvaluatorSecond.FUNCTION_NAME, TemporalEvaluatorSecond.class)
+      .withFunctionName(TemporalEvaluatorEpoch.FUNCTION_NAME, TemporalEvaluatorEpoch.class)
+      .withFunctionName(TemporalEvaluatorWeek.FUNCTION_NAME, TemporalEvaluatorWeek.class)
+         .withFunctionName(TemporalEvaluatorQuarter.FUNCTION_NAME, TemporalEvaluatorQuarter.class)
+         .withFunctionName(TemporalEvaluatorDayOfQuarter.FUNCTION_NAME, TemporalEvaluatorDayOfQuarter.class)
+
       // Number Stream Evaluators
-      .withFunctionName("abs", AbsoluteValueEvaluator.class)
+         .withFunctionName("abs", AbsoluteValueEvaluator.class)
       .withFunctionName("add", AddEvaluator.class)
       .withFunctionName("div", DivideEvaluator.class)
       .withFunctionName("mult", MultiplyEvaluator.class)
@@ -263,7 +214,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
       .withFunctionName("log", NaturalLogEvaluator.class)
       .withFunctionName("pow", PowerEvaluator.class)
       .withFunctionName("mod", ModuloEvaluator.class)
-      .withFunctionName("ceil", CeilingEvaluator.class)
+         .withFunctionName("ceil", CeilingEvaluator.class)
       .withFunctionName("floor", FloorEvaluator.class)
       .withFunctionName("sin", SineEvaluator.class)
       .withFunctionName("asin", ArcSineEvaluator.class)
@@ -274,15 +225,17 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
       .withFunctionName("tan", TangentEvaluator.class)
       .withFunctionName("atan", ArcTangentEvaluator.class)
       .withFunctionName("tanh", HyperbolicTangentEvaluator.class)
-      .withFunctionName("round", RoundEvaluator.class)
+         .withFunctionName("round", RoundEvaluator.class)
       .withFunctionName("sqrt", SquareRootEvaluator.class)
       .withFunctionName("cbrt", CubedRootEvaluator.class)
       .withFunctionName("coalesce", CoalesceEvaluator.class)
       .withFunctionName("uuid", UuidEvaluator.class)
 
       // Conditional Stream Evaluators
       .withFunctionName("if", IfThenElseEvaluator.class)
       .withFunctionName("analyze", AnalyzeEvaluator.class)
+      .withFunctionName("convert", ConversionEvaluator.class)
       ;
 
      // This pulls all the overrides and additions from the config

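The newly registered functions become available as streaming expressions on the /stream endpoint. Hedged examples (collection1 and the dt_s date field are placeholders, and the temporal FUNCTION_NAME constants are assumed to resolve to year, month, etc.):

    echo("hello world")

    select(search(collection1, q="*:*", fl="id,dt_s", sort="id asc"),
           id,
           year(dt_s) as year,
           month(dt_s) as month)
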
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java
index 581fe46..3cb21ab 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java
@@ -65,6 +65,7 @@ public class CollectionHandlerApi extends BaseHandlerApiSupport {
     GET_CLUSTER_STATUS_CMD(EndPoint.CLUSTER_CMD_STATUS, GET, REQUESTSTATUS_OP),
     DELETE_CLUSTER_STATUS(EndPoint.CLUSTER_CMD_STATUS_DELETE, DELETE, DELETESTATUS_OP),
     GET_A_COLLECTION(EndPoint.COLLECTION_STATE, GET, CLUSTERSTATUS_OP),
+    LIST_ALIASES(EndPoint.CLUSTER_ALIASES, GET, LISTALIASES_OP),
     CREATE_COLLECTION(EndPoint.COLLECTIONS_COMMANDS,
         POST,
         CREATE_OP,
@@ -290,6 +291,7 @@ public class CollectionHandlerApi extends BaseHandlerApiSupport {
 
   enum EndPoint implements V2EndPoint {
     CLUSTER("cluster"),
+    CLUSTER_ALIASES("cluster.aliases"),
     CLUSTER_CMD("cluster.Commands"),
     CLUSTER_NODES("cluster.nodes"),
     CLUSTER_CMD_STATUS("cluster.commandstatus"),

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index 2e17af6..d5c4927 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -52,6 +52,7 @@ import org.apache.solr.cloud.rule.ReplicaAssigner;
 import org.apache.solr.cloud.rule.Rule;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.cloud.Aliases;
 import org.apache.solr.common.cloud.ClusterProperties;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
@@ -460,6 +461,19 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
       return req.getParams().required().getAll(null, NAME, "collections");
     }),
     DELETEALIAS_OP(DELETEALIAS, (req, rsp, h) -> req.getParams().required().getAll(null, NAME)),
+
+    /**
+     * Handle the LISTALIASES request.
+     * Returns the map of all collection aliases currently defined in the cluster.
+     */
+    LISTALIASES_OP(LISTALIASES, (req, rsp, h) -> {
+      ZkStateReader zkStateReader = h.coreContainer.getZkController().getZkStateReader();
+      Aliases aliases = zkStateReader.getAliases();
+      if (aliases != null) {
+        rsp.getValues().add("aliases", aliases.getCollectionAliasMap());
+      }
+      return null;
+    }),
     SPLITSHARD_OP(SPLITSHARD, DEFAULT_COLLECTION_OP_TIMEOUT * 5, true, (req, rsp, h) -> {
       String name = req.getParams().required().get(COLLECTION_PROP);
       // TODO : add support for multiple shards
@@ -859,6 +873,16 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
       return null;
     }),
     REPLACENODE_OP(REPLACENODE, (req, rsp, h) -> req.getParams().required().getAll(req.getParams().getAll(null, "parallel"), "source", "target")),
+    MOVEREPLICA_OP(MOVEREPLICA, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null,
+          COLLECTION_PROP);
+
+      return req.getParams().getAll(map,
+          "fromNode",
+          "targetNode",
+          "replica",
+          "shard");
+    }),
     DELETENODE_OP(DELETENODE, (req, rsp, h) -> req.getParams().required().getAll(null, "node"));
     public final CollectionOp fun;
     CollectionAction action;
@@ -881,7 +905,7 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
       for (CollectionOperation op : values()) {
         if (op.action == action) return op;
       }
-      throw new SolrException(ErrorCode.SERVER_ERROR, "No such action" + action);
+      throw new SolrException(ErrorCode.SERVER_ERROR, "No such action " + action);
     }
 
     @Override

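LISTALIASES exposes the alias map that previously had to be read out of aliases.json by hand, and the CLUSTER_ALIASES endpoint added to CollectionHandlerApi above surfaces the same data through the v2 API. A hedged example against a local node (host/port and the alias shown are illustrative; "aliases" is the response key added above):

    $ curl "http://localhost:8983/solr/admin/collections?action=LISTALIASES&wt=json"
    {
      "responseHeader": {"status": 0, "QTime": 1},
      "aliases": {"testalias": "collection1"}
    }
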
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
index d3489df..3f857e7 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
@@ -306,13 +306,15 @@ public class ConfigSetsHandler extends RequestHandlerBase implements PermissionN
 
   @Override
   public Name getPermissionName(AuthorizationContext ctx) {
-    switch (ctx.getHttpMethod()) {
-      case "GET":
-        return Name.CONFIG_READ_PERM;
-      case "POST":
+    String a = ctx.getParams().get(ConfigSetParams.ACTION);
+    if (a != null) {
+      ConfigSetAction action = ConfigSetAction.get(a);
+      if (action == ConfigSetAction.CREATE || action == ConfigSetAction.DELETE || action == ConfigSetAction.UPLOAD) {
         return Name.CONFIG_EDIT_PERM;
-      default:
-        return null;
+      } else if (action == ConfigSetAction.LIST) {
+        return Name.CONFIG_READ_PERM;
+      }
     }
+    return null;
   }
 }

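The permission for config-set requests is now derived from the ACTION parameter rather than the HTTP method, since reads and edits are not cleanly split across GET/POST. The resulting mapping, read straight off the code above:

    CREATE, DELETE, UPLOAD   ->  CONFIG_EDIT_PERM
    LIST                     ->  CONFIG_READ_PERM
    missing/unknown action   ->  null (no specific permission required)
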
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
index 275ec18..6746332 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
@@ -121,7 +121,7 @@ public class CoreAdminHandler extends RequestHandlerBase implements PermissionNa
   @Override
   public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
     super.initializeMetrics(manager, registryName, scope);
-    parallelExecutor = MetricUtils.instrumentedExecutorService(parallelExecutor, manager.registry(registryName),
+    parallelExecutor = MetricUtils.instrumentedExecutorService(parallelExecutor, this, manager.registry(registryName),
         SolrMetricManager.mkName("parallelCoreAdminExecutor", getCategory().name(),scope, "threadPool"));
   }
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/LoggingHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/LoggingHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/LoggingHandler.java
index b10aed1..122d2cb 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/LoggingHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/LoggingHandler.java
@@ -60,7 +60,7 @@ public class LoggingHandler extends RequestHandlerBase implements SolrCoreAware
   @Override
   public void inform(SolrCore core) {
     if (watcher == null) {
-      watcher = core.getCoreDescriptor().getCoreContainer().getLogging();
+      watcher = core.getCoreContainer().getLogging();
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
index 8e0b1fb..2db04d9 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
@@ -22,8 +22,6 @@ import static org.apache.lucene.index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.net.MalformedURLException;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
@@ -707,14 +705,6 @@ public class LukeRequestHandler extends RequestHandlerBase
     return Category.ADMIN;
   }
 
-  @Override
-  public URL[] getDocs() {
-    try {
-      return new URL[] { new URL("http://wiki.apache.org/solr/LukeRequestHandler") };
-    }
-    catch( MalformedURLException ex ) { return null; }
-  }
-
   ///////////////////////////////////////////////////////////////////////////////////////
 
   static class TermHistogram

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java b/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
index 3da3541..1d8aecb 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
@@ -33,7 +33,7 @@ import java.util.Iterator;
 import java.util.List;
 
 /**
  * Accumulates statistics separated by a slot number.
  * There is a separate statistic per slot. The slot is usually an ordinal into a set of values, e.g. tracking a count
  * frequency <em>per term</em>.
  * Sometimes there doesn't need to be a slot distinction, in which case there is just one nominal slot.
@@ -46,8 +46,7 @@ public abstract class SlotAcc implements Closeable {
     this.fcontext = fcontext;
   }
 
-  public void setNextReader(LeafReaderContext readerContext) throws IOException {
-  }
+  public void setNextReader(LeafReaderContext readerContext) throws IOException {}
 
   public abstract void collect(int doc, int slot) throws IOException;
 
@@ -61,7 +60,7 @@ public abstract class SlotAcc implements Closeable {
     int segBase = 0;
     int segMax;
     int adjustedMax = 0;
-    for (DocIterator docsIt = docs.iterator(); docsIt.hasNext(); ) {
+    for (DocIterator docsIt = docs.iterator(); docsIt.hasNext();) {
       final int doc = docsIt.nextDoc();
       if (doc >= adjustedMax) {
         do {
@@ -78,12 +77,11 @@ public abstract class SlotAcc implements Closeable {
         setNextReader(ctx);
       }
       count++;
-      collect(doc - segBase, slot);  // per-seg collectors
+      collect(doc - segBase, slot); // per-seg collectors
     }
     return count;
   }
 
-
   public abstract int compare(int slotA, int slotB);
 
   public abstract Object getValue(int slotNum) throws IOException;
@@ -101,8 +99,7 @@ public abstract class SlotAcc implements Closeable {
   public abstract void resize(Resizer resizer);
 
   @Override
-  public void close() throws IOException {
-  }
+  public void close() throws IOException {}
 
   public static abstract class Resizer {
     public abstract int getNewSize();
@@ -181,15 +178,14 @@ abstract class FuncSlotAcc extends SlotAcc {
   }
 }
 
-
-// have a version that counts the number of times a Slot has been hit?  (for avg... what else?)
+// have a version that counts the number of times a Slot has been hit? (for avg... what else?)
 
 // TODO: make more sense to have func as the base class rather than double?
 // double-slot-func -> func-slot -> slot -> acc
 // double-slot-func -> double-slot -> slot -> acc
 
 abstract class DoubleFuncSlotAcc extends FuncSlotAcc {
-  double[] result;  // TODO: use DoubleArray
+  double[] result; // TODO: use DoubleArray
   double initialValue;
 
   public DoubleFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
@@ -210,7 +206,6 @@ abstract class DoubleFuncSlotAcc extends FuncSlotAcc {
     return Double.compare(result[slotA], result[slotB]);
   }
 
-
   @Override
   public Object getValue(int slot) {
     return result[slot];
@@ -228,7 +223,7 @@ abstract class DoubleFuncSlotAcc extends FuncSlotAcc {
 }
 
 abstract class IntSlotAcc extends SlotAcc {
-  int[] result;  // use LongArray32
+  int[] result; // use LongArray32
   int initialValue;
 
   public IntSlotAcc(FacetContext fcontext, int numSlots, int initialValue) {
@@ -261,15 +256,13 @@ abstract class IntSlotAcc extends SlotAcc {
   }
 }
 
-
-
 class SumSlotAcc extends DoubleFuncSlotAcc {
   public SumSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
     super(values, fcontext, numSlots);
   }
 
   public void collect(int doc, int slotNum) throws IOException {
-    double val = values.doubleVal(doc);  // todo: worth trying to share this value across multiple stats that need it?
+    double val = values.doubleVal(doc); // todo: worth trying to share this value across multiple stats that need it?
     result[slotNum] += val;
   }
 }
@@ -287,8 +280,6 @@ class SumsqSlotAcc extends DoubleFuncSlotAcc {
   }
 }
 
-
-
 class MinSlotAcc extends DoubleFuncSlotAcc {
   public MinSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
     super(values, fcontext, numSlots, Double.NaN);
@@ -297,10 +288,10 @@ class MinSlotAcc extends DoubleFuncSlotAcc {
   @Override
   public void collect(int doc, int slotNum) throws IOException {
     double val = values.doubleVal(doc);
-    if (val == 0 && !values.exists(doc)) return;  // depend on fact that non existing values return 0 for func query
+    if (val == 0 && !values.exists(doc)) return; // depend on fact that non existing values return 0 for func query
 
     double currMin = result[slotNum];
-    if (!(val >= currMin)) {  // val>=currMin will be false for staring value: val>=NaN
+    if (!(val >= currMin)) { // val>=currMin will be false for starting value: val>=NaN
       result[slotNum] = val;
     }
   }
@@ -314,17 +305,16 @@ class MaxSlotAcc extends DoubleFuncSlotAcc {
   @Override
   public void collect(int doc, int slotNum) throws IOException {
     double val = values.doubleVal(doc);
-    if (val == 0 && !values.exists(doc)) return;  // depend on fact that non existing values return 0 for func query
+    if (val == 0 && !values.exists(doc)) return; // depend on fact that non existing values return 0 for func query
 
     double currMax = result[slotNum];
-    if (!(val <= currMax)) {  // reversed order to handle NaN
+    if (!(val <= currMax)) { // reversed order to handle NaN
       result[slotNum] = val;
     }
   }
 
 }
 
-
 class AvgSlotAcc extends DoubleFuncSlotAcc {
   int[] counts;
 
@@ -336,7 +326,7 @@ class AvgSlotAcc extends DoubleFuncSlotAcc {
   @Override
   public void reset() {
     super.reset();
-    for (int i=0; i<counts.length; i++) {
+    for (int i = 0; i < counts.length; i++) {
       counts[i] = 0;
     }
   }
@@ -351,11 +341,12 @@ class AvgSlotAcc extends DoubleFuncSlotAcc {
   }
 
   private double avg(double tot, int count) {
-    return count==0 ? 0 : tot/count;  // returns 0 instead of NaN.. todo - make configurable? if NaN, we need to handle comparisons though...
+    return count == 0 ? 0 : tot / count; // returns 0 instead of NaN.. todo - make configurable? if NaN, we need to
+                                         // handle comparisons though...
   }
 
   private double avg(int slot) {
-    return avg(result[slot], counts[slot]);  // calc once and cache in result?
+    return avg(result[slot], counts[slot]); // calc once and cache in result?
   }
 
   @Override
@@ -367,8 +358,8 @@ class AvgSlotAcc extends DoubleFuncSlotAcc {
   public Object getValue(int slot) {
     if (fcontext.isShard()) {
       ArrayList lst = new ArrayList(2);
-      lst.add( counts[slot] );
-      lst.add( result[slot] );
+      lst.add(counts[slot]);
+      lst.add(result[slot]);
       return lst;
     } else {
       return avg(slot);
@@ -382,32 +373,157 @@ class AvgSlotAcc extends DoubleFuncSlotAcc {
   }
 }
 
+class VarianceSlotAcc extends DoubleFuncSlotAcc {
+  int[] counts;
+  double[] sum;
+
+  public VarianceSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
+    super(values, fcontext, numSlots);
+    counts = new int[numSlots];
+    sum = new double[numSlots];
+  }
+
+  @Override
+  public void reset() {
+    super.reset();
+    Arrays.fill(counts, 0);
+    Arrays.fill(sum, 0);
+  }
+
+  @Override
+  public void resize(Resizer resizer) {
+    super.resize(resizer);
+    this.counts = resizer.resize(this.counts, 0);
+    this.sum = resizer.resize(this.sum, 0);
+  }
+
+  private double variance(double sumSq, double sum, int count) {
+    double val = count == 0 ? 0 : (sumSq / count) - Math.pow(sum / count, 2);
+    return val;
+  }
+
+  private double variance(int slot) {
+    return variance(result[slot], sum[slot], counts[slot]); // calc once and cache in result?
+  }
+
+  @Override
+  public int compare(int slotA, int slotB) {
+    return Double.compare(this.variance(slotA), this.variance(slotB));
+  }
+
+  @Override
+  public Object getValue(int slot) {
+    if (fcontext.isShard()) {
+      ArrayList lst = new ArrayList(3);
+      lst.add(counts[slot]);
+      lst.add(result[slot]);
+      lst.add(sum[slot]);
+      return lst;
+    } else {
+      return this.variance(slot);
+    }
+  }
+
+  @Override
+  public void collect(int doc, int slot) throws IOException {
+    double val = values.doubleVal(doc);
+    if (values.exists(doc)) {
+      counts[slot]++;
+      result[slot] += val * val;
+      sum[slot] += val;
+    }
+  }
+}
+
+class StddevSlotAcc extends DoubleFuncSlotAcc {
+  int[] counts;
+  double[] sum;
+
+  public StddevSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
+    super(values, fcontext, numSlots);
+    counts = new int[numSlots];
+    sum = new double[numSlots];
+  }
+
+  @Override
+  public void reset() {
+    super.reset();
+    Arrays.fill(counts, 0);
+    Arrays.fill(sum, 0);
+  }
+
+  @Override
+  public void resize(Resizer resizer) {
+    super.resize(resizer);
+    this.counts = resizer.resize(this.counts, 0);
+    this.sum = resizer.resize(this.sum, 0); // result is already resized by super.resize()
+  }
+
+  private double stdDev(double sumSq, double sum, int count) {
+    double val = count == 0 ? 0 : Math.sqrt((sumSq / count) - Math.pow(sum / count, 2)); 
+    return val;
+  }
+
+  private double stdDev(int slot) {
+    return stdDev(result[slot], sum[slot], counts[slot]); // calc once and cache in result?
+  }
+
+  @Override
+  public int compare(int slotA, int slotB) {
+    return Double.compare(this.stdDev(slotA), this.stdDev(slotB));
+  }
+
+  @Override
+  public Object getValue(int slot) {
+    if (fcontext.isShard()) {
+      ArrayList lst = new ArrayList(3);
+      lst.add(counts[slot]);
+      lst.add(result[slot]);
+      lst.add(sum[slot]);
+      return lst;
+    } else {
+      return this.stdDev(slot);
+    }
+  }
+
+  @Override
+  public void collect(int doc, int slot) throws IOException {
+    double val = values.doubleVal(doc);
+    if (values.exists(doc)) {
+      counts[slot]++;
+      result[slot] += val * val;
+      sum[slot] += val;
+    }
+  }
+}
+
 abstract class CountSlotAcc extends SlotAcc {
   public CountSlotAcc(FacetContext fcontext) {
     super(fcontext);
   }
 
   public abstract void incrementCount(int slot, int count);
+
   public abstract int getCount(int slot);
 }
 
-
-
 class CountSlotArrAcc extends CountSlotAcc {
   int[] result;
+
   public CountSlotArrAcc(FacetContext fcontext, int numSlots) {
     super(fcontext);
     result = new int[numSlots];
   }
 
   @Override
-  public void collect(int doc, int slotNum) {       // TODO: count arrays can use fewer bytes based on the number of docs in the base set (that's the upper bound for single valued) - look at ttf?
+  public void collect(int doc, int slotNum) { // TODO: count arrays can use fewer bytes based on the number of docs in
+                                              // the base set (that's the upper bound for single valued) - look at ttf?
     result[slotNum]++;
   }
 
   @Override
   public int compare(int slotA, int slotB) {
-    return Integer.compare( result[slotA], result[slotB] );
+    return Integer.compare(result[slotA], result[slotB]);
   }
 
   @Override
@@ -439,7 +555,6 @@ class CountSlotArrAcc extends CountSlotAcc {
   }
 }
 
-
 class SortSlotAcc extends SlotAcc {
   public SortSlotAcc(FacetContext fcontext) {
     super(fcontext);

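Both new accumulators lean on the sum-of-squares identity, which is what keeps the merge state constant-size: each slot only tracks the triple (count, sum of squares, sum), and on a shard request that triple is returned verbatim for the coordinator to add up. In LaTeX form:

    \mathrm{variance} = \frac{\sum_i x_i^2}{n} - \Big(\frac{\sum_i x_i}{n}\Big)^2,
    \qquad \mathrm{stddev} = \sqrt{\mathrm{variance}}

Note this is the population form (dividing by n, not n-1), and the single-pass formula can lose precision when values are large relative to their spread; that is the usual trade-off for a mergeable, constant-size state.
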
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java b/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java
new file mode 100644
index 0000000..917df6e
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.search.facet;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.lucene.queries.function.ValueSource;
+
+
+public class StddevAgg extends SimpleAggValueSource {
+  public StddevAgg(ValueSource vs) {
+    super("stddev", vs);
+  }
+
+  @Override
+  public SlotAcc createSlotAcc(FacetContext fcontext, int numDocs, int numSlots) throws IOException {
+    return new StddevSlotAcc(getArg(), fcontext, numSlots);
+  }
+
+  @Override
+  public FacetMerger createFacetMerger(Object prototype) {
+    return new Merger();
+  }
+
+  private static class Merger extends FacetDoubleMerger {
+    long count;
+    double sumSq;
+    double sum;
+    
+    @Override
+    @SuppressWarnings("unchecked")
+    public void merge(Object facetResult, Context mcontext1) {
+      List<Number> numberList = (List<Number>)facetResult;
+      this.count += numberList.get(0).longValue();
+      this.sumSq += numberList.get(1).doubleValue();
+      this.sum += numberList.get(2).doubleValue();
+    }
+
+    @Override
+    public Object getMergedResult() {
+      return this.getDouble();
+    }
+    
+    @Override
+    protected double getDouble() {      
+      double val = count == 0 ? 0.0d : Math.sqrt((sumSq/count)-Math.pow(sum/count, 2));
+      return val;
+    }    
+  };
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java b/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java
new file mode 100644
index 0000000..ec6955f
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search.facet;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.lucene.queries.function.ValueSource;
+
+
+public class VarianceAgg extends SimpleAggValueSource {
+  public VarianceAgg(ValueSource vs) {
+    super("variance", vs);
+  }
+
+  @Override
+  public SlotAcc createSlotAcc(FacetContext fcontext, int numDocs, int numSlots) throws IOException {
+    return new VarianceSlotAcc(getArg(), fcontext, numSlots);
+  }
+
+  @Override
+  public FacetMerger createFacetMerger(Object prototype) {
+    return new Merger();
+  }
+
+  private static class Merger extends FacetDoubleMerger {
+    long count;
+    double sumSq;
+    double sum;
+    
+    @Override
+    @SuppressWarnings("unchecked")
+    public void merge(Object facetResult, Context mcontext1) {
+      List<Number> numberList = (List<Number>)facetResult;
+      this.count += numberList.get(0).longValue();
+      this.sumSq += numberList.get(1).doubleValue();
+      this.sum += numberList.get(2).doubleValue();
+    }
+
+    @Override
+    public Object getMergedResult() {
+      return this.getDouble();
+    }
+    
+    @Override
+    protected double getDouble() {      
+      double val = count == 0 ? 0.0d : (sumSq/count)-Math.pow(sum/count, 2);
+      return val;
+    }    
+  };
+}

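StddevAgg and VarianceAgg report under the names passed to super() ("stddev", "variance"), and their Merger classes recombine the per-shard (count, sumSq, sum) triples before applying the formula, so the distributed result matches the single-node one. A hedged usage sketch (the price field is a placeholder, assuming the two aggs are wired into the JSON Facet function parser under those names):

    json.facet = {
      "price_var"    : "variance(price)",
      "price_stddev" : "stddev(price)"
    }
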
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/Command.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/Command.java b/solr/core/src/java/org/apache/solr/search/grouping/Command.java
index 55e2d96..7391df6 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/Command.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/Command.java
@@ -60,6 +60,6 @@ public interface Command<T> {
   /**
    * @return The sort inside a group
    */
-  Sort getSortWithinGroup();
+  Sort getWithinGroupSort();
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
index 2dd2291..4ec01db 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
@@ -32,8 +32,8 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TimeLimitingCollector;
 import org.apache.lucene.search.TotalHitCountCollector;
 import org.apache.lucene.search.grouping.AllGroupHeadsCollector;
-import org.apache.lucene.search.grouping.function.FunctionAllGroupHeadsCollector;
-import org.apache.lucene.search.grouping.term.TermAllGroupHeadsCollector;
+import org.apache.lucene.search.grouping.TermGroupSelector;
+import org.apache.lucene.search.grouping.ValueSourceGroupSelector;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.SchemaField;
@@ -174,9 +174,11 @@ public class CommandHandler {
     final AllGroupHeadsCollector allGroupHeadsCollector;
     if (fieldType.getNumberType() != null) {
       ValueSource vs = fieldType.getValueSource(sf, null);
-      allGroupHeadsCollector = new FunctionAllGroupHeadsCollector(vs, new HashMap(), firstCommand.getSortWithinGroup());
+      allGroupHeadsCollector = AllGroupHeadsCollector.newCollector(new ValueSourceGroupSelector(vs, new HashMap<>()),
+          firstCommand.getWithinGroupSort());
     } else {
-      allGroupHeadsCollector = TermAllGroupHeadsCollector.create(firstCommand.getKey(), firstCommand.getSortWithinGroup());
+      allGroupHeadsCollector
+          = AllGroupHeadsCollector.newCollector(new TermGroupSelector(firstCommand.getKey()), firstCommand.getWithinGroupSort());
     }
     if (collectors.isEmpty()) {
       searchWithTimeLimiter(query, filter, allGroupHeadsCollector);

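This follows the Lucene 7 grouping refactor: the parallel Term*/Function* collector hierarchies are gone, replaced by single collector classes parameterized with a GroupSelector, as the new call sites above show. In sketch form, with placeholder field name, sorts, and ValueSource:

    // group heads for a string field:
    AllGroupHeadsCollector<?> heads =
        AllGroupHeadsCollector.newCollector(new TermGroupSelector("author_s"), withinGroupSort);

    // first-pass top-N groups for a numeric field, via its ValueSource:
    FirstPassGroupingCollector<?> firstPass =
        new FirstPassGroupingCollector<>(new ValueSourceGroupSelector(vs, new HashMap<>()), groupSort, 10);

    // distinct group count:
    AllGroupsCollector<?> allGroups =
        new AllGroupsCollector<>(new TermGroupSelector("author_s"));
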
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
index afb8ba7..1615237 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
@@ -149,7 +149,7 @@ public class QueryCommand implements Command<QueryCommandResult> {
   }
 
   @Override
-  public Sort getSortWithinGroup() {
+  public Sort getWithinGroupSort() {
     return null;
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java
index d5f9f9d..b81dda0 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java
@@ -16,24 +16,26 @@
  */
 package org.apache.solr.search.grouping.distributed.command;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.grouping.AllGroupsCollector;
 import org.apache.lucene.search.grouping.FirstPassGroupingCollector;
 import org.apache.lucene.search.grouping.SearchGroup;
-import org.apache.lucene.search.grouping.function.FunctionAllGroupsCollector;
-import org.apache.lucene.search.grouping.function.FunctionFirstPassGroupingCollector;
-import org.apache.lucene.search.grouping.term.TermAllGroupsCollector;
-import org.apache.lucene.search.grouping.term.TermFirstPassGroupingCollector;
+import org.apache.lucene.search.grouping.TermGroupSelector;
+import org.apache.lucene.search.grouping.ValueSourceGroupSelector;
 import org.apache.lucene.util.BytesRef;
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.grouping.Command;
 
-import java.io.IOException;
-import java.util.*;
-
 /**
  * Creates all the collectors needed for the first phase and how to handle the results.
  */
@@ -98,18 +100,20 @@ public class SearchGroupsFieldCommand implements Command<SearchGroupsFieldComman
     if (topNGroups > 0) {
       if (fieldType.getNumberType() != null) {
         ValueSource vs = fieldType.getValueSource(field, null);
-        firstPassGroupingCollector = new FunctionFirstPassGroupingCollector(vs, new HashMap<Object,Object>(), groupSort, topNGroups);
+        firstPassGroupingCollector
+            = new FirstPassGroupingCollector<>(new ValueSourceGroupSelector(vs, new HashMap<>()), groupSort, topNGroups);
       } else {
-        firstPassGroupingCollector = new TermFirstPassGroupingCollector(field.getName(), groupSort, topNGroups);
+        firstPassGroupingCollector
+            = new FirstPassGroupingCollector<>(new TermGroupSelector(field.getName()), groupSort, topNGroups);
       }
       collectors.add(firstPassGroupingCollector);
     }
     if (includeGroupCount) {
       if (fieldType.getNumberType() != null) {
         ValueSource vs = fieldType.getValueSource(field, null);
-        allGroupsCollector = new FunctionAllGroupsCollector(vs, new HashMap<Object,Object>());
+        allGroupsCollector = new AllGroupsCollector<>(new ValueSourceGroupSelector(vs, new HashMap<>()));
       } else {
-        allGroupsCollector = new TermAllGroupsCollector(field.getName());
+        allGroupsCollector = new AllGroupsCollector<>(new TermGroupSelector(field.getName()));
       }
       collectors.add(allGroupsCollector);
     }
@@ -138,7 +142,7 @@ public class SearchGroupsFieldCommand implements Command<SearchGroupsFieldComman
   }
 
   @Override
-  public Sort getSortWithinGroup() {
+  public Sort getWithinGroupSort() {
     return null;
   }
 

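The first search phase follows the same pattern: one FirstPassGroupingCollector and one AllGroupsCollector, each fed a selector instead of being term- or function-specific. A hedged sketch with the same placeholder names as above:

  FirstPassGroupingCollector<BytesRef> firstPass = new FirstPassGroupingCollector<>(
      new TermGroupSelector("author_s"), Sort.RELEVANCE, 10 /* topNGroups */);
  AllGroupsCollector<BytesRef> allGroups =
      new AllGroupsCollector<>(new TermGroupSelector("author_s"));
  searcher.search(query, MultiCollector.wrap(firstPass, allGroups));
  Collection<SearchGroup<BytesRef>> topGroups = firstPass.getTopGroups(0, true);
  int distinctGroups = allGroups.getGroupCount();
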
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java
index 2c6c401..a496278 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java
@@ -16,28 +16,28 @@
  */
 package org.apache.solr.search.grouping.distributed.command;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.grouping.SecondPassGroupingCollector;
 import org.apache.lucene.search.grouping.GroupDocs;
 import org.apache.lucene.search.grouping.SearchGroup;
+import org.apache.lucene.search.grouping.TermGroupSelector;
 import org.apache.lucene.search.grouping.TopGroups;
-import org.apache.lucene.search.grouping.function.FunctionSecondPassGroupingCollector;
-import org.apache.lucene.search.grouping.term.TermSecondPassGroupingCollector;
+import org.apache.lucene.search.grouping.TopGroupsCollector;
+import org.apache.lucene.search.grouping.ValueSourceGroupSelector;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.mutable.MutableValue;
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.grouping.Command;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-
 /**
  * Defines all collectors for retrieving the second phase and how to handle the collector result.
  */
@@ -47,7 +47,7 @@ public class TopGroupsFieldCommand implements Command<TopGroups<BytesRef>> {
 
     private SchemaField field;
     private Sort groupSort;
-    private Sort sortWithinGroup;
+    private Sort withinGroupSort;
     private Collection<SearchGroup<BytesRef>> firstPhaseGroups;
     private Integer maxDocPerGroup;
     private boolean needScores = false;
@@ -63,8 +63,8 @@ public class TopGroupsFieldCommand implements Command<TopGroups<BytesRef>> {
       return this;
     }
 
-    public Builder setSortWithinGroup(Sort sortWithinGroup) {
-      this.sortWithinGroup = sortWithinGroup;
+    public Builder setSortWithinGroup(Sort withinGroupSort) {
+      this.withinGroupSort = withinGroupSort;
       return this;
     }
 
@@ -89,35 +89,35 @@ public class TopGroupsFieldCommand implements Command<TopGroups<BytesRef>> {
     }
 
     public TopGroupsFieldCommand build() {
-      if (field == null || groupSort == null ||  sortWithinGroup == null || firstPhaseGroups == null ||
+      if (field == null || groupSort == null ||  withinGroupSort == null || firstPhaseGroups == null ||
           maxDocPerGroup == null) {
         throw new IllegalStateException("All required fields must be set");
       }
 
-      return new TopGroupsFieldCommand(field, groupSort, sortWithinGroup, firstPhaseGroups, maxDocPerGroup, needScores, needMaxScore);
+      return new TopGroupsFieldCommand(field, groupSort, withinGroupSort, firstPhaseGroups, maxDocPerGroup, needScores, needMaxScore);
     }
 
   }
 
   private final SchemaField field;
   private final Sort groupSort;
-  private final Sort sortWithinGroup;
+  private final Sort withinGroupSort;
   private final Collection<SearchGroup<BytesRef>> firstPhaseGroups;
   private final int maxDocPerGroup;
   private final boolean needScores;
   private final boolean needMaxScore;
-  private SecondPassGroupingCollector secondPassCollector;
+  private TopGroupsCollector secondPassCollector;
 
   private TopGroupsFieldCommand(SchemaField field,
                                 Sort groupSort,
-                                Sort sortWithinGroup,
+                                Sort withinGroupSort,
                                 Collection<SearchGroup<BytesRef>> firstPhaseGroups,
                                 int maxDocPerGroup,
                                 boolean needScores,
                                 boolean needMaxScore) {
     this.field = field;
     this.groupSort = groupSort;
-    this.sortWithinGroup = sortWithinGroup;
+    this.withinGroupSort = withinGroupSort;
     this.firstPhaseGroups = firstPhaseGroups;
     this.maxDocPerGroup = maxDocPerGroup;
     this.needScores = needScores;
@@ -135,12 +135,12 @@ public class TopGroupsFieldCommand implements Command<TopGroups<BytesRef>> {
     if (fieldType.getNumberType() != null) {
       ValueSource vs = fieldType.getValueSource(field, null);
       Collection<SearchGroup<MutableValue>> v = GroupConverter.toMutable(field, firstPhaseGroups);
-      secondPassCollector = new FunctionSecondPassGroupingCollector(
-          v, groupSort, sortWithinGroup, maxDocPerGroup, needScores, needMaxScore, true, vs, new HashMap<Object,Object>()
+      secondPassCollector = new TopGroupsCollector<>(new ValueSourceGroupSelector(vs, new HashMap<>()),
+          v, groupSort, withinGroupSort, maxDocPerGroup, needScores, needMaxScore, true
       );
     } else {
-      secondPassCollector = new TermSecondPassGroupingCollector(
-          field.getName(), firstPhaseGroups, groupSort, sortWithinGroup, maxDocPerGroup, needScores, needMaxScore, true
+      secondPassCollector = new TopGroupsCollector<>(new TermGroupSelector(field.getName()),
+          firstPhaseGroups, groupSort, withinGroupSort, maxDocPerGroup, needScores, needMaxScore, true
       );
     }
     collectors.add(secondPassCollector);
@@ -151,7 +151,7 @@ public class TopGroupsFieldCommand implements Command<TopGroups<BytesRef>> {
   @SuppressWarnings("unchecked")
   public TopGroups<BytesRef> result() {
     if (firstPhaseGroups.isEmpty()) {
-      return new TopGroups<>(groupSort.getSort(), sortWithinGroup.getSort(), 0, 0, new GroupDocs[0], Float.NaN);
+      return new TopGroups<>(groupSort.getSort(), withinGroupSort.getSort(), 0, 0, new GroupDocs[0], Float.NaN);
     }
 
     FieldType fieldType = field.getType();
@@ -173,7 +173,7 @@ public class TopGroupsFieldCommand implements Command<TopGroups<BytesRef>> {
   }
 
   @Override
-  public Sort getSortWithinGroup() {
-    return sortWithinGroup;
+  public Sort getWithinGroupSort() {
+    return withinGroupSort;
   }
 }

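TopGroupsCollector takes the groups found in the first pass and collects the top documents inside each. A sketch of the second phase, passing arguments in the order the hunk above uses ("topGroups" is the first-pass output, other names are placeholders):

  TopGroupsCollector<BytesRef> secondPass = new TopGroupsCollector<>(
      new TermGroupSelector("author_s"), topGroups,
      Sort.RELEVANCE /* groupSort */, Sort.RELEVANCE /* withinGroupSort */,
      10 /* maxDocsPerGroup */, true /* needScores */, true /* needMaxScore */,
      true /* fillSortFields */);
  searcher.search(query, secondPass);
  TopGroups<BytesRef> result = secondPass.getTopGroups(0 /* withinGroupOffset */);
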
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
index a12cad1..ab13f72 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
@@ -55,8 +55,8 @@ public class SearchGroupShardResponseProcessor implements ShardResponseProcessor
     SortSpec ss = rb.getSortSpec();
     Sort groupSort = rb.getGroupingSpec().getGroupSort();
     final String[] fields = rb.getGroupingSpec().getFields();
-    Sort sortWithinGroup = rb.getGroupingSpec().getSortWithinGroup();
-    assert sortWithinGroup != null;
+    Sort withinGroupSort = rb.getGroupingSpec().getSortWithinGroup();
+    assert withinGroupSort != null;
 
     final Map<String, List<Collection<SearchGroup<BytesRef>>>> commandSearchGroups = new HashMap<>(fields.length, 1.0f);
     final Map<String, Map<SearchGroup<BytesRef>, Set<String>>> tempSearchGroupToShards = new HashMap<>(fields.length, 1.0f);
@@ -111,7 +111,7 @@ public class SearchGroupShardResponseProcessor implements ShardResponseProcessor
       maxElapsedTime = (int) Math.max(maxElapsedTime, srsp.getSolrResponse().getElapsedTime());
       @SuppressWarnings("unchecked")
       NamedList<NamedList> firstPhaseResult = (NamedList<NamedList>) srsp.getSolrResponse().getResponse().get("firstPhase");
-      final Map<String, SearchGroupsFieldCommandResult> result = serializer.transformToNative(firstPhaseResult, groupSort, sortWithinGroup, srsp.getShard());
+      final Map<String, SearchGroupsFieldCommandResult> result = serializer.transformToNative(firstPhaseResult, groupSort, withinGroupSort, srsp.getShard());
       for (String field : commandSearchGroups.keySet()) {
         final SearchGroupsFieldCommandResult firstPhaseCommandResult = result.get(field);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
index 2ac83c6..231e9bd 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
@@ -58,8 +58,8 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
     Sort groupSort = rb.getGroupingSpec().getGroupSort();
     String[] fields = rb.getGroupingSpec().getFields();
     String[] queries = rb.getGroupingSpec().getQueries();
-    Sort sortWithinGroup = rb.getGroupingSpec().getSortWithinGroup();
-    assert sortWithinGroup != null;
+    Sort withinGroupSort = rb.getGroupingSpec().getSortWithinGroup();
+    assert withinGroupSort != null;
 
     // If group.format=simple group.offset doesn't make sense
     int groupOffsetDefault;
@@ -122,7 +122,7 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
       NamedList<NamedList> secondPhaseResult = (NamedList<NamedList>) srsp.getSolrResponse().getResponse().get("secondPhase");
       if(secondPhaseResult == null)
         continue;
-      Map<String, ?> result = serializer.transformToNative(secondPhaseResult, groupSort, sortWithinGroup, srsp.getShard());
+      Map<String, ?> result = serializer.transformToNative(secondPhaseResult, groupSort, withinGroupSort, srsp.getShard());
       int numFound = 0;
       float maxScore = Float.NaN;
       for (String field : commandTopGroups.keySet()) {
@@ -164,7 +164,7 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
           docsPerGroup += subTopGroups.totalGroupedHitCount;
         }
       }
-      rb.mergedTopGroups.put(groupField, TopGroups.merge(topGroups.toArray(topGroupsArr), groupSort, sortWithinGroup, groupOffsetDefault, docsPerGroup, TopGroups.ScoreMergeMode.None));
+      rb.mergedTopGroups.put(groupField, TopGroups.merge(topGroups.toArray(topGroupsArr), groupSort, withinGroupSort, groupOffsetDefault, docsPerGroup, TopGroups.ScoreMergeMode.None));
     }
 
     for (String query : commandTopDocs.keySet()) {
@@ -178,10 +178,10 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
 
       int topN = rb.getGroupingSpec().getOffset() + rb.getGroupingSpec().getLimit();
       final TopDocs mergedTopDocs;
-      if (sortWithinGroup.equals(Sort.RELEVANCE)) {
+      if (withinGroupSort.equals(Sort.RELEVANCE)) {
         mergedTopDocs = TopDocs.merge(topN, topDocs.toArray(new TopDocs[topDocs.size()]));
       } else {
-        mergedTopDocs = TopDocs.merge(sortWithinGroup, topN, topDocs.toArray(new TopFieldDocs[topDocs.size()]));
+        mergedTopDocs = TopDocs.merge(withinGroupSort, topN, topDocs.toArray(new TopFieldDocs[topDocs.size()]));
       }
       rb.mergedQueryCommandResults.put(query, new QueryCommandResult(mergedTopDocs, mergedMatches));
     }

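The two merge branches above correspond to two TopDocs.merge overloads: relevance-sorted shard results merge by score alone, while field-sorted results must be merged with the same Sort and TopFieldDocs inputs. Roughly, with "shardDocs"/"shardFieldDocs" standing in for per-shard arrays:

  TopDocs byScore = TopDocs.merge(10, shardDocs);                 // TopDocs[]
  Sort byPrice = new Sort(new SortField("price", SortField.Type.LONG));
  TopDocs byField = TopDocs.merge(byPrice, 10, shardFieldDocs);   // TopFieldDocs[]
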
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java
index 2602221..77dfcef 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java
@@ -77,7 +77,7 @@ public class SearchGroupsResultTransformer implements ShardResultTransformer<Lis
    * {@inheritDoc}
    */
   @Override
-  public Map<String, SearchGroupsFieldCommandResult> transformToNative(NamedList<NamedList> shardResponse, Sort groupSort, Sort sortWithinGroup, String shard) {
+  public Map<String, SearchGroupsFieldCommandResult> transformToNative(NamedList<NamedList> shardResponse, Sort groupSort, Sort withinGroupSort, String shard) {
     final Map<String, SearchGroupsFieldCommandResult> result = new HashMap<>(shardResponse.size());
     for (Map.Entry<String, NamedList> command : shardResponse) {
       List<SearchGroup<BytesRef>> searchGroups = new ArrayList<>();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java
index 04a3dfc..47e20a0 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java
@@ -44,10 +44,10 @@ public interface ShardResultTransformer<T, R> {
    *
    * @param shardResponse The shard response containing data in a {@link NamedList} structure
    * @param groupSort The group sort
-   * @param sortWithinGroup The sort inside a group
+   * @param withinGroupSort The sort inside a group
    * @param shard The shard address where the response originated from
    * @return native structure of the data
    */
-  R transformToNative(NamedList<NamedList> shardResponse, Sort groupSort, Sort sortWithinGroup, String shard);
+  R transformToNative(NamedList<NamedList> shardResponse, Sort groupSort, Sort withinGroupSort, String shard);
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
index 83c81e5..41145ba 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
@@ -92,7 +92,7 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
    * {@inheritDoc}
    */
   @Override
-  public Map<String, ?> transformToNative(NamedList<NamedList> shardResponse, Sort groupSort, Sort sortWithinGroup, String shard) {
+  public Map<String, ?> transformToNative(NamedList<NamedList> shardResponse, Sort groupSort, Sort withinGroupSort, String shard) {
     Map<String, Object> result = new HashMap<>();
 
     final IndexSchema schema = rb.req.getSearcher().getSchema();
@@ -113,10 +113,10 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
         List<NamedList<Object>> documents = (List<NamedList<Object>>) commandResult.get("documents");
         ScoreDoc[] scoreDocs = transformToNativeShardDoc(documents, groupSort, shard, schema);
         final TopDocs topDocs;
-        if (sortWithinGroup.equals(Sort.RELEVANCE)) {
+        if (withinGroupSort.equals(Sort.RELEVANCE)) {
           topDocs = new TopDocs(totalHits, scoreDocs, maxScore);
         } else {
-          topDocs = new TopFieldDocs(totalHits, scoreDocs, sortWithinGroup.getSort(), maxScore);
+          topDocs = new TopFieldDocs(totalHits, scoreDocs, withinGroupSort.getSort(), maxScore);
         }
         result.put(key, new QueryCommandResult(topDocs, matches));
         continue;
@@ -137,7 +137,7 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
 
         @SuppressWarnings("unchecked")
         List<NamedList<Object>> documents = (List<NamedList<Object>>) groupResult.get("documents");
-        ScoreDoc[] scoreDocs = transformToNativeShardDoc(documents, sortWithinGroup, shard, schema);
+        ScoreDoc[] scoreDocs = transformToNativeShardDoc(documents, withinGroupSort, shard, schema);
 
         BytesRef groupValueRef = groupValue != null ? new BytesRef(groupValue) : null;
         groupDocs.add(new GroupDocs<>(Float.NaN, maxScore, totalGroupHits, scoreDocs, groupValueRef, null));
@@ -146,7 +146,7 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
       @SuppressWarnings("unchecked")
       GroupDocs<BytesRef>[] groupDocsArr = groupDocs.toArray(new GroupDocs[groupDocs.size()]);
       TopGroups<BytesRef> topGroups = new TopGroups<>(
-           groupSort.getSort(), sortWithinGroup.getSort(), totalHitCount, totalGroupedHitCount, groupDocsArr, Float.NaN
+           groupSort.getSort(), withinGroupSort.getSort(), totalHitCount, totalGroupedHitCount, groupDocsArr, Float.NaN
       );
 
       result.put(key, topGroups);
@@ -222,8 +222,8 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
         Object[] convertedSortValues  = new Object[fieldDoc.fields.length];
         for (int j = 0; j < fieldDoc.fields.length; j++) {
           Object sortValue  = fieldDoc.fields[j];
-          Sort sortWithinGroup = rb.getGroupingSpec().getSortWithinGroup();
-          SchemaField field = sortWithinGroup.getSort()[j].getField() != null ? schema.getFieldOrNull(sortWithinGroup.getSort()[j].getField()) : null;
+          Sort withinGroupSort = rb.getGroupingSpec().getSortWithinGroup();
+          SchemaField field = withinGroupSort.getSort()[j].getField() != null ? schema.getFieldOrNull(withinGroupSort.getSort()[j].getField()) : null;
           if (field != null) {
             FieldType fieldType = field.getType();
             if (sortValue != null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java
index 999cd64..edccc88 100644
--- a/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java
@@ -89,7 +89,7 @@ public class ScoreJoinQParserPlugin extends QParserPlugin {
     public Query rewrite(IndexReader reader) throws IOException {
       SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
 
-      CoreContainer container = info.getReq().getCore().getCoreDescriptor().getCoreContainer();
+      CoreContainer container = info.getReq().getCore().getCoreContainer();
 
       final SolrCore fromCore = container.getCore(fromIndex);
 
@@ -222,7 +222,7 @@ public class ScoreJoinQParserPlugin extends QParserPlugin {
         final String myCore = req.getCore().getCoreDescriptor().getName();
 
         if (fromIndex != null && (!fromIndex.equals(myCore) || byPassShortCircutCheck)) {
-          CoreContainer container = req.getCore().getCoreDescriptor().getCoreContainer();
+          CoreContainer container = req.getCore().getCoreContainer();
 
           final String coreName = getCoreName(fromIndex, container);
           final SolrCore fromCore = container.getCore(coreName);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index ff0db9b..39ccadc 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -16,7 +16,6 @@
  */
 package org.apache.solr.servlet;
 
-import javax.management.MBeanServer;
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
 import javax.servlet.ServletException;
@@ -34,7 +33,6 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.lang.invoke.MethodHandles;
-import java.lang.management.ManagementFactory;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.time.Instant;
@@ -42,12 +40,12 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Locale;
 import java.util.Properties;
+import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import com.codahale.metrics.jvm.BufferPoolMetricSet;
 import com.codahale.metrics.jvm.ClassLoadingGaugeSet;
 import com.codahale.metrics.jvm.GarbageCollectorMetricSet;
 import com.codahale.metrics.jvm.MemoryUsageGaugeSet;
@@ -66,9 +64,11 @@ import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.NodeConfig;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.core.SolrXmlConfig;
+import org.apache.solr.metrics.AltBufferPoolMetricSet;
+import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.metrics.OperatingSystemMetricSet;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.request.SolrRequestInfo;
@@ -185,16 +185,24 @@ public class SolrDispatchFilter extends BaseSolrFilter {
   }
 
   private void setupJvmMetrics()  {
-    MBeanServer platformMBeanServer = ManagementFactory.getPlatformMBeanServer();
     SolrMetricManager metricManager = cores.getMetricManager();
+    final Set<String> hiddenSysProps = cores.getConfig().getHiddenSysProps();
     try {
-      String registry = SolrMetricManager.getRegistryName(SolrInfoMBean.Group.jvm);
-      metricManager.registerAll(registry, new BufferPoolMetricSet(platformMBeanServer), true, "buffers");
+      String registry = SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm);
+      metricManager.registerAll(registry, new AltBufferPoolMetricSet(), true, "buffers");
       metricManager.registerAll(registry, new ClassLoadingGaugeSet(), true, "classes");
-      metricManager.registerAll(registry, new OperatingSystemMetricSet(platformMBeanServer), true, "os");
+      metricManager.registerAll(registry, new OperatingSystemMetricSet(), true, "os");
       metricManager.registerAll(registry, new GarbageCollectorMetricSet(), true, "gc");
       metricManager.registerAll(registry, new MemoryUsageGaugeSet(), true, "memory");
       metricManager.registerAll(registry, new ThreadStatesGaugeSet(), true, "threads"); // todo should we use CachedThreadStatesGaugeSet instead?
+      MetricsMap sysprops = new MetricsMap((detailed, map) -> {
+        System.getProperties().forEach((k, v) -> {
+          if (!hiddenSysProps.contains(k)) {
+            map.put(String.valueOf(k), v);
+          }
+        });
+      });
+      metricManager.registerGauge(null, registry, sysprops, true, "properties", "system");
     } catch (Exception e) {
       log.warn("Error registering JVM metrics", e);
     }

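MetricsMap, introduced here for the system-properties gauge, turns a (detailed, map) callback into a single dynamic gauge that is re-evaluated on every metrics fetch. A minimal sketch using the same registerGauge shape and variable names as the hunk above; the "uptime" gauge itself is illustrative only:

  MetricsMap uptime = new MetricsMap((detailed, map) ->
      map.put("uptimeMillis", ManagementFactory.getRuntimeMXBean().getUptime()));
  metricManager.registerGauge(null, registry, uptime, true, "uptime", "jvm");
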
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java b/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
index 3394de1..75917d0 100644
--- a/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
+++ b/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
@@ -15,6 +15,8 @@
  * limitations under the License.
  */
 package org.apache.solr.spelling;
+import static org.apache.solr.common.params.CommonParams.ID;
+
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -132,6 +134,19 @@ public class SpellCheckCollator {
         params.remove(DisMaxParams.BF);
         // Collate testing does not support Grouping (see SOLR-2577)
         params.remove(GroupParams.GROUP);
+        
+        // Collate testing does not support the Collapse QParser (See SOLR-8807)
+        params.remove("expand");
+        String[] filters = params.getParams(CommonParams.FQ);
+        if (filters != null) {
+          List<String> filtersToApply = new ArrayList<>(filters.length);
+          for (String fq : filters) {
+            if (!fq.startsWith("{!collapse")) {
+              filtersToApply.add(fq);
+            }
+          }
+          params.set("fq", filtersToApply.toArray(new String[filtersToApply.size()]));
+        }      
 
         // creating a request here... make sure to close it!
         ResponseBuilder checkResponse = new ResponseBuilder(

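The new block leaves every filter query in place except collapse filters, which the collation test query cannot evaluate. The idiom in isolation, self-contained with made-up filter values:

  ModifiableSolrParams params = new ModifiableSolrParams();
  params.add(CommonParams.FQ, "{!collapse field=group_s}");
  params.add(CommonParams.FQ, "inStock:true");

  String[] filters = params.getParams(CommonParams.FQ);
  List<String> filtersToApply = new ArrayList<>(filters.length);
  for (String fq : filters) {
    if (!fq.startsWith("{!collapse")) {
      filtersToApply.add(fq);
    }
  }
  params.set(CommonParams.FQ, filtersToApply.toArray(new String[filtersToApply.size()]));
  // params now carries only "inStock:true"
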
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/spelling/SpellingQueryConverter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/spelling/SpellingQueryConverter.java b/solr/core/src/java/org/apache/solr/spelling/SpellingQueryConverter.java
index 6499c50..4cc75b5 100644
--- a/solr/core/src/java/org/apache/solr/spelling/SpellingQueryConverter.java
+++ b/solr/core/src/java/org/apache/solr/spelling/SpellingQueryConverter.java
@@ -89,7 +89,7 @@ public class SpellingQueryConverter extends QueryConverter  {
     NMTOKEN = "([" + sb.toString() + "]|" + SURROGATE_PAIR + ")+";
   }
 
-  final static String PATTERN = "(?:(?!(" + NMTOKEN + ":|[\\^.]\\d+)))[^^.\\s][\\p{L}_\\-0-9]+";
+  final static String PATTERN = "(?:(?!(" + NMTOKEN + ":|[\\^.]\\d+)))[^^.:(\\s][\\p{L}_\\-0-9]+";
   // previous version: Pattern.compile("(?:(?!(\\w+:|\\d+)))\\w+");
   protected Pattern QUERY_REGEX = Pattern.compile(PATTERN);
   

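The widened character class means a candidate token may no longer begin with ':' or '(' in addition to '^', '.' and whitespace, so field prefixes and opening parentheses are not mistaken for correctable words. Just that leading class in isolation (the full PATTERN also carries the NMTOKEN look-ahead shown earlier):

  Pattern lead = Pattern.compile("[^^.:(\\s]");
  lead.matcher(":").matches();  // false with the new class
  lead.matcher("f").matches();  // true, ordinary word characters still match
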
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java b/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
index d3e3497..b8b9bea 100644
--- a/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
+++ b/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
@@ -16,20 +16,23 @@
  */
 package org.apache.solr.store.blockcache;
 
-import java.net.URL;
+import java.util.HashSet;
+import java.util.Set;
 import java.util.concurrent.atomic.AtomicLong;
 
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.SolrInfoMBean;
+import com.codahale.metrics.MetricRegistry;
+import org.apache.solr.core.SolrInfoBean;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
 import org.apache.solr.search.SolrCacheBase;
 
 /**
- * A {@link SolrInfoMBean} that provides metrics on block cache operations.
+ * A {@link SolrInfoBean} that provides metrics on block cache operations.
  *
  * @lucene.experimental
  */
-public class Metrics extends SolrCacheBase implements SolrInfoMBean {
+public class Metrics extends SolrCacheBase implements SolrInfoBean, SolrMetricProducer {
 
 
   public AtomicLong blockCacheSize = new AtomicLong(0);
@@ -50,66 +53,70 @@ public class Metrics extends SolrCacheBase implements SolrInfoMBean {
   public AtomicLong shardBuffercacheAllocate = new AtomicLong(0);
   public AtomicLong shardBuffercacheLost = new AtomicLong(0);
 
+  private MetricsMap metricsMap;
+  private MetricRegistry registry;
+  private Set<String> metricNames = new HashSet<>();
 
   private long previous = System.nanoTime();
 
-
-  public NamedList<Number> getStatistics() {
-    NamedList<Number> stats = new SimpleOrderedMap<>(21); // room for one method call before growing
-
-    long now = System.nanoTime();
-    long delta = Math.max(now - previous, 1);
-    double seconds = delta / 1000000000.0;
-
-    long hits_total = blockCacheHit.get();
-    long hits_delta = hits_total - blockCacheHit_last.get();
-    blockCacheHit_last.set(hits_total);
-
-    long miss_total = blockCacheMiss.get();
-    long miss_delta = miss_total - blockCacheMiss_last.get();
-    blockCacheMiss_last.set(miss_total);
-
-    long evict_total = blockCacheEviction.get();
-    long evict_delta = evict_total - blockCacheEviction_last.get();
-    blockCacheEviction_last.set(evict_total);
-
-    long storeFail_total = blockCacheStoreFail.get();
-    long storeFail_delta = storeFail_total - blockCacheStoreFail_last.get();
-    blockCacheStoreFail_last.set(storeFail_total);
-
-    long lookups_delta = hits_delta + miss_delta;
-    long lookups_total = hits_total + miss_total;
-
-    stats.add("size", blockCacheSize.get());
-    stats.add("lookups", lookups_total);
-    stats.add("hits", hits_total);
-    stats.add("evictions", evict_total);
-    stats.add("storeFails", storeFail_total);
-    stats.add("hitratio_current", calcHitRatio(lookups_delta, hits_delta));  // hit ratio since the last call
-    stats.add("lookups_persec", getPerSecond(lookups_delta, seconds)); // lookups per second since the last call
-    stats.add("hits_persec", getPerSecond(hits_delta, seconds));       // hits per second since the last call
-    stats.add("evictions_persec", getPerSecond(evict_delta, seconds));  // evictions per second since the last call
-    stats.add("storeFails_persec", getPerSecond(storeFail_delta, seconds));  // evictions per second since the last call
-    stats.add("time_delta", seconds);  // seconds since last call
-
-    // TODO: these aren't really related to the BlockCache
-    stats.add("buffercache.allocations", getPerSecond(shardBuffercacheAllocate.getAndSet(0), seconds));
-    stats.add("buffercache.lost", getPerSecond(shardBuffercacheLost.getAndSet(0), seconds));
-
-    previous = now;
-
-    return stats;
+  @Override
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    registry = manager.registry(registryName);
+    metricsMap = new MetricsMap((detailed, map) -> {
+      long now = System.nanoTime();
+      long delta = Math.max(now - previous, 1);
+      double seconds = delta / 1000000000.0;
+
+      long hits_total = blockCacheHit.get();
+      long hits_delta = hits_total - blockCacheHit_last.get();
+      blockCacheHit_last.set(hits_total);
+
+      long miss_total = blockCacheMiss.get();
+      long miss_delta = miss_total - blockCacheMiss_last.get();
+      blockCacheMiss_last.set(miss_total);
+
+      long evict_total = blockCacheEviction.get();
+      long evict_delta = evict_total - blockCacheEviction_last.get();
+      blockCacheEviction_last.set(evict_total);
+
+      long storeFail_total = blockCacheStoreFail.get();
+      long storeFail_delta = storeFail_total - blockCacheStoreFail_last.get();
+      blockCacheStoreFail_last.set(storeFail_total);
+
+      long lookups_delta = hits_delta + miss_delta;
+      long lookups_total = hits_total + miss_total;
+
+      map.put("size", blockCacheSize.get());
+      map.put("lookups", lookups_total);
+      map.put("hits", hits_total);
+      map.put("evictions", evict_total);
+      map.put("storeFails", storeFail_total);
+      map.put("hitratio_current", calcHitRatio(lookups_delta, hits_delta));  // hit ratio since the last call
+      map.put("lookups_persec", getPerSecond(lookups_delta, seconds)); // lookups per second since the last call
+      map.put("hits_persec", getPerSecond(hits_delta, seconds));       // hits per second since the last call
+      map.put("evictions_persec", getPerSecond(evict_delta, seconds));  // evictions per second since the last call
+      map.put("storeFails_persec", getPerSecond(storeFail_delta, seconds));  // evictions per second since the last call
+      map.put("time_delta", seconds);  // seconds since last call
+
+      // TODO: these aren't really related to the BlockCache
+      map.put("buffercache.allocations", getPerSecond(shardBuffercacheAllocate.getAndSet(0), seconds));
+      map.put("buffercache.lost", getPerSecond(shardBuffercacheLost.getAndSet(0), seconds));
+
+      previous = now;
+
+    });
+    manager.registerGauge(this, registryName, metricsMap, true, getName(), getCategory().toString(), scope);
   }
 
   private float getPerSecond(long value, double seconds) {
     return (float) (value / seconds);
   }
 
-  // SolrInfoMBean methods
+  // SolrInfoBean methods
 
   @Override
   public String getName() {
-    return "HdfsBlockCache";
+    return "hdfsBlockCache";
   }
 
   @Override
@@ -118,12 +125,13 @@ public class Metrics extends SolrCacheBase implements SolrInfoMBean {
   }
 
   @Override
-  public String getSource() {
-    return null;
+  public Set<String> getMetricNames() {
+    return metricNames;
   }
 
   @Override
-  public URL[] getDocs() {
-    return null;
+  public MetricRegistry getMetricRegistry() {
+    return registry;
   }
+
 }

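The migration pattern here - implement SolrMetricProducer, build a MetricsMap inside initializeMetrics(), register it as a gauge - replaces the old pull-style getStatistics() NamedList. A stripped-down sketch with invented class and metric names; the null owner mirrors the SolrDispatchFilter registration earlier in this commit:

  public class SimpleCacheMetrics implements SolrMetricProducer {
    private final AtomicLong hits = new AtomicLong();

    public void recordHit() { hits.incrementAndGet(); }

    @Override
    public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
      // Re-evaluated on each fetch; no snapshot bookkeeping needed by the caller.
      MetricsMap map = new MetricsMap((detailed, m) -> m.put("hits", hits.get()));
      manager.registerGauge(null, registryName, map, true, "simpleCache", "CACHE", scope);
    }
  }
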
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
index ba7c7fd..64e6356 100644
--- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
+++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
@@ -18,8 +18,8 @@ package org.apache.solr.store.hdfs;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.net.URL;
 import java.util.Arrays;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -27,16 +27,18 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
+import com.codahale.metrics.MetricRegistry;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class HdfsLocalityReporter implements SolrInfoMBean {
+public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
   public static final String LOCALITY_BYTES_TOTAL = "locality.bytes.total";
   public static final String LOCALITY_BYTES_LOCAL = "locality.bytes.local";
   public static final String LOCALITY_BYTES_RATIO = "locality.bytes.ratio";
@@ -49,6 +51,9 @@ public class HdfsLocalityReporter implements SolrInfoMBean {
   private String hostname;
   private final ConcurrentMap<HdfsDirectory,ConcurrentMap<FileStatus,BlockLocation[]>> cache;
 
+  private final Set<String> metricNames = new HashSet<>();
+  private MetricRegistry registry;
+
   public HdfsLocalityReporter() {
     cache = new ConcurrentHashMap<>();
   }
@@ -67,11 +72,6 @@ public class HdfsLocalityReporter implements SolrInfoMBean {
   }
 
   @Override
-  public String getVersion() {
-    return getClass().getPackage().getSpecificationVersion();
-  }
-
-  @Override
   public String getDescription() {
     return "Provides metrics for HDFS data locality.";
   }
@@ -82,89 +82,71 @@ public class HdfsLocalityReporter implements SolrInfoMBean {
   }
 
   @Override
-  public String getSource() {
-    return null;
+  public Set<String> getMetricNames() {
+    return metricNames;
   }
 
   @Override
-  public URL[] getDocs() {
-    return null;
+  public MetricRegistry getMetricRegistry() {
+    return registry;
   }
 
   /**
    * Provide statistics on HDFS block locality, both in terms of bytes and block counts.
    */
   @Override
-  public NamedList getStatistics() {
-    long totalBytes = 0;
-    long localBytes = 0;
-    int totalCount = 0;
-    int localCount = 0;
-
-    for (Iterator<HdfsDirectory> iterator = cache.keySet().iterator(); iterator.hasNext();) {
-      HdfsDirectory hdfsDirectory = iterator.next();
-
-      if (hdfsDirectory.isClosed()) {
-        iterator.remove();
-      } else {
-        try {
-          refreshDirectory(hdfsDirectory);
-          Map<FileStatus,BlockLocation[]> blockMap = cache.get(hdfsDirectory);
-
-          // For every block in every file in this directory, count it
-          for (BlockLocation[] locations : blockMap.values()) {
-            for (BlockLocation bl : locations) {
-              totalBytes += bl.getLength();
-              totalCount++;
-
-              if (Arrays.asList(bl.getHosts()).contains(hostname)) {
-                localBytes += bl.getLength();
-                localCount++;
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    registry = manager.registry(registryName);
+    MetricsMap metricsMap = new MetricsMap((detailed, map) -> {
+      long totalBytes = 0;
+      long localBytes = 0;
+      int totalCount = 0;
+      int localCount = 0;
+
+      for (Iterator<HdfsDirectory> iterator = cache.keySet().iterator(); iterator.hasNext();) {
+        HdfsDirectory hdfsDirectory = iterator.next();
+
+        if (hdfsDirectory.isClosed()) {
+          iterator.remove();
+        } else {
+          try {
+            refreshDirectory(hdfsDirectory);
+            Map<FileStatus,BlockLocation[]> blockMap = cache.get(hdfsDirectory);
+
+            // For every block in every file in this directory, count it
+            for (BlockLocation[] locations : blockMap.values()) {
+              for (BlockLocation bl : locations) {
+                totalBytes += bl.getLength();
+                totalCount++;
+
+                if (Arrays.asList(bl.getHosts()).contains(hostname)) {
+                  localBytes += bl.getLength();
+                  localCount++;
+                }
               }
             }
+          } catch (IOException e) {
+            logger.warn("Could not retrieve locality information for {} due to exception: {}",
+                hdfsDirectory.getHdfsDirPath(), e);
           }
-        } catch (IOException e) {
-          logger.warn("Could not retrieve locality information for {} due to exception: {}",
-              hdfsDirectory.getHdfsDirPath(), e);
         }
       }
-    }
-
-    return createStatistics(totalBytes, localBytes, totalCount, localCount);
-  }
-
-  /**
-   * Generate a statistics object based on the given measurements for all files monitored by this reporter.
-   * 
-   * @param totalBytes
-   *          The total bytes used
-   * @param localBytes
-   *          The amount of bytes found on local nodes
-   * @param totalCount
-   *          The total block count
-   * @param localCount
-   *          The amount of blocks found on local nodes
-   * @return HDFS block locality statistics
-   */
-  private NamedList<Number> createStatistics(long totalBytes, long localBytes, int totalCount, int localCount) {
-    NamedList<Number> statistics = new SimpleOrderedMap<Number>();
-
-    statistics.add(LOCALITY_BYTES_TOTAL, totalBytes);
-    statistics.add(LOCALITY_BYTES_LOCAL, localBytes);
-    if (localBytes == 0) {
-      statistics.add(LOCALITY_BYTES_RATIO, 0);
-    } else {
-      statistics.add(LOCALITY_BYTES_RATIO, localBytes / (double) totalBytes);
-    }
-    statistics.add(LOCALITY_BLOCKS_TOTAL, totalCount);
-    statistics.add(LOCALITY_BLOCKS_LOCAL, localCount);
-    if (localCount == 0) {
-      statistics.add(LOCALITY_BLOCKS_RATIO, 0);
-    } else {
-      statistics.add(LOCALITY_BLOCKS_RATIO, localCount / (double) totalCount);
-    }
-
-    return statistics;
+      map.put(LOCALITY_BYTES_TOTAL, totalBytes);
+      map.put(LOCALITY_BYTES_LOCAL, localBytes);
+      if (localBytes == 0) {
+        map.put(LOCALITY_BYTES_RATIO, 0);
+      } else {
+        map.put(LOCALITY_BYTES_RATIO, localBytes / (double) totalBytes);
+      }
+      map.put(LOCALITY_BLOCKS_TOTAL, totalCount);
+      map.put(LOCALITY_BLOCKS_LOCAL, localCount);
+      if (localCount == 0) {
+        map.put(LOCALITY_BLOCKS_RATIO, 0);
+      } else {
+        map.put(LOCALITY_BLOCKS_RATIO, localCount / (double) totalCount);
+      }
+    });
+    manager.registerGauge(this, registryName, metricsMap, true, "hdfsLocality", getCategory().toString(), scope);
   }
 
   /**
@@ -209,4 +191,5 @@ public class HdfsLocalityReporter implements SolrInfoMBean {
       }
     }
   }
+
 }

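One detail in the lambda above: directories that report isClosed() are evicted from the cache while it is being iterated, which is safe because ConcurrentHashMap iterators support remove() without a ConcurrentModificationException. The idiom in isolation, with a stand-in map:

  ConcurrentMap<String, Boolean> closed = new ConcurrentHashMap<>();
  closed.put("dirA", true);   // true = closed; illustrative values only
  closed.put("dirB", false);
  for (Iterator<Map.Entry<String, Boolean>> it = closed.entrySet().iterator(); it.hasNext();) {
    if (it.next().getValue()) {
      it.remove();
    }
  }
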
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java b/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java
index 0ba0b81..7006b4a 100644
--- a/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java
+++ b/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java
@@ -19,11 +19,11 @@ package org.apache.solr.uninverting;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Map;
+import java.util.function.Function;
 
 import org.apache.lucene.document.BinaryDocValuesField; // javadocs
 import org.apache.lucene.document.NumericDocValuesField; // javadocs
 import org.apache.lucene.document.SortedDocValuesField; // javadocs
-import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField; // javadocs
 import org.apache.lucene.document.StringField; // javadocs
 import org.apache.lucene.index.BinaryDocValues;
@@ -170,62 +170,44 @@ public class UninvertingReader extends FilterLeafReader {
      * Fields with this type act as if they were indexed with
      * {@link SortedSetDocValuesField}.
      */
-    SORTED_SET_DOUBLE,
-    /** 
-     * Multi-valued Integer, (e.g. indexed with {@link org.apache.lucene.document.IntPoint})
-     * <p>
-     * Fields with this type act as if they were indexed with
-     * {@link SortedNumericDocValuesField}.
-     */
-    SORTED_INTEGER,
-    /** 
-     * Multi-valued Float, (e.g. indexed with {@link org.apache.lucene.document.FloatPoint})
-     * <p>
-     * Fields with this type act as if they were indexed with
-     * {@link SortedNumericDocValuesField}.
-     */
-    SORTED_FLOAT,
-    /** 
-     * Multi-valued Long, (e.g. indexed with {@link org.apache.lucene.document.LongPoint})
-     * <p>
-     * Fields with this type act as if they were indexed with
-     * {@link SortedNumericDocValuesField}.
-     */
-    SORTED_LONG,
-    /** 
-     * Multi-valued Double, (e.g. indexed with {@link org.apache.lucene.document.DoublePoint})
-     * <p>
-     * Fields with this type act as if they were indexed with
-     * {@link SortedNumericDocValuesField}.
-     */
-    SORTED_DOUBLE
+    SORTED_SET_DOUBLE
+
   }
   
   /**
+   * 
    * Wraps a provided DirectoryReader. Note that for convenience, the returned reader
    * can be used normally (e.g. passed to {@link DirectoryReader#openIfChanged(DirectoryReader)})
    * and so on. 
+   * 
+   * @param in input directory reader
+   * @param perSegmentMapper function to map a segment reader to a mapping of fields to their uninversion type
+   * @return a wrapped directory reader
    */
+  public static DirectoryReader wrap(DirectoryReader in, final Function<LeafReader, Map<String,Type>> perSegmentMapper) throws IOException {
+    return new UninvertingDirectoryReader(in, perSegmentMapper);
+  }
+  
   public static DirectoryReader wrap(DirectoryReader in, final Map<String,Type> mapping) throws IOException {
-    return new UninvertingDirectoryReader(in, mapping);
+    return UninvertingReader.wrap(in, (r) -> mapping);
   }
   
   static class UninvertingDirectoryReader extends FilterDirectoryReader {
-    final Map<String,Type> mapping;
+    final Function<LeafReader, Map<String,Type>> mapper;
     
-    public UninvertingDirectoryReader(DirectoryReader in, final Map<String,Type> mapping) throws IOException {
+    public UninvertingDirectoryReader(DirectoryReader in, final Function<LeafReader, Map<String,Type>> mapper) throws IOException {
       super(in, new FilterDirectoryReader.SubReaderWrapper() {
         @Override
         public LeafReader wrap(LeafReader reader) {
-          return new UninvertingReader(reader, mapping);
+          return new UninvertingReader(reader, mapper.apply(reader));
         }
       });
-      this.mapping = mapping;
+      this.mapper = mapper;
     }
 
     @Override
     protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
-      return new UninvertingDirectoryReader(in, mapping);
+      return new UninvertingDirectoryReader(in, mapper);
     }
 
     // NOTE: delegating the cache helpers is wrong since this wrapper alters the
@@ -244,7 +226,7 @@ public class UninvertingReader extends FilterLeafReader {
   /** 
    * Create a new UninvertingReader with the specified mapping 
    * <p>
-   * Expert: This should almost never be used. Use {@link #wrap(DirectoryReader, Map)}
+   * Expert: This should almost never be used. Use {@link #wrap(DirectoryReader, Function)}
    * instead.
    *  
    * @lucene.internal
@@ -293,12 +275,6 @@ public class UninvertingReader extends FilterLeafReader {
             case SORTED_SET_DOUBLE:
               type = DocValuesType.SORTED_SET;
               break;
-            case SORTED_INTEGER:
-            case SORTED_FLOAT:
-            case SORTED_LONG:
-            case SORTED_DOUBLE:
-              type = DocValuesType.SORTED_NUMERIC;
-              break;
             default:
               throw new AssertionError();
           }

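The new Function-based wrap() overload decides the uninversion mapping per segment rather than once for the whole reader; the old Map overload is now a one-line adapter onto it. A hedged sketch, where "dirReader" and the field name are placeholders and Type.INTEGER_POINT is assumed from the unchanged part of the enum:

  DirectoryReader wrapped = UninvertingReader.wrap(dirReader, leaf -> {
    Map<String, UninvertingReader.Type> mapping = new HashMap<>();
    FieldInfo fi = leaf.getFieldInfos().fieldInfo("popularity");
    // Only uninvert segments that indexed the field without docValues:
    if (fi != null && fi.getDocValuesType() == DocValuesType.NONE) {
      mapping.put("popularity", UninvertingReader.Type.INTEGER_POINT);
    }
    return mapping;
  });
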
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java b/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
index d0daebb..bc2afa8 100644
--- a/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
+++ b/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
@@ -281,7 +281,7 @@ public final class DefaultSolrCoreState extends SolrCoreState implements Recover
     Thread thread = new Thread() {
       @Override
       public void run() {
-        MDCLoggingContext.setCoreDescriptor(cd);
+        MDCLoggingContext.setCoreDescriptor(cc, cd);
         try {
           if (SKIP_AUTO_RECOVERY) {
             log.warn("Skipping recovery according to sys prop solrcloud.skip.autorecovery");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
index fdc9d22..dd179f2 100644
--- a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
+++ b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
@@ -18,7 +18,6 @@ package org.apache.solr.update;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
@@ -48,8 +47,6 @@ import org.apache.solr.cloud.ZkController;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.core.SolrConfig.UpdateHandlerInfo;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.metrics.SolrMetricManager;
@@ -124,7 +121,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
     commitWithinSoftCommit = updateHandlerInfo.commitWithinSoftCommit;
     indexWriterCloseWaitsForMerges = updateHandlerInfo.indexWriterCloseWaitsForMerges;
 
-    ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
+    ZkController zkController = core.getCoreContainer().getZkController();
     if (zkController != null) {
       DocCollection dc = zkController.getClusterState().getCollection(core.getCoreDescriptor().getCollectionName());
       if (dc.getRealtimeReplicas() == 1) {
@@ -162,24 +159,40 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
 
   @Override
   public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
-    commitCommands = manager.meter(registry, "commits", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> commitTracker.getCommitCount(), true, "autoCommits", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> softCommitTracker.getCommitCount(), true, "softAutoCommits", getCategory().toString(), scope);
-    optimizeCommands = manager.meter(registry, "optimizes", getCategory().toString(), scope);
-    rollbackCommands = manager.meter(registry, "rollbacks", getCategory().toString(), scope);
-    splitCommands = manager.meter(registry, "splits", getCategory().toString(), scope);
-    mergeIndexesCommands = manager.meter(registry, "merges", getCategory().toString(), scope);
-    expungeDeleteCommands = manager.meter(registry, "expungeDeletes", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> numDocsPending.longValue(), true, "docsPending", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> addCommands.longValue(), true, "adds", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> deleteByIdCommands.longValue(), true, "deletesById", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> deleteByQueryCommands.longValue(), true, "deletesByQuery", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> numErrors.longValue(), true, "errors", getCategory().toString(), scope);
-
-    addCommandsCumulative = manager.meter(registry, "cumulativeAdds", getCategory().toString(), scope);
-    deleteByIdCommandsCumulative = manager.meter(registry, "cumulativeDeletesById", getCategory().toString(), scope);
-    deleteByQueryCommandsCumulative = manager.meter(registry, "cumulativeDeletesByQuery", getCategory().toString(), scope);
-    numErrorsCumulative = manager.meter(registry, "cumulativeErrors", getCategory().toString(), scope);
+    commitCommands = manager.meter(this, registry, "commits", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> commitTracker.getCommitCount(), true, "autoCommits", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> softCommitTracker.getCommitCount(), true, "softAutoCommits", getCategory().toString(), scope);
+    if (commitTracker.getDocsUpperBound() > 0) {
+      manager.registerGauge(this, registry, () -> commitTracker.getDocsUpperBound(), true, "autoCommitMaxDocs",
+          getCategory().toString(), scope);
+    }
+    if (commitTracker.getTimeUpperBound() > 0) {
+      manager.registerGauge(this, registry, () -> "" + commitTracker.getTimeUpperBound() + "ms", true, "autoCommitMaxTime",
+          getCategory().toString(), scope);
+    }
+    if (softCommitTracker.getDocsUpperBound() > 0) {
+      manager.registerGauge(this, registry, () -> softCommitTracker.getDocsUpperBound(), true, "softAutoCommitMaxDocs",
+          getCategory().toString(), scope);
+    }
+    if (softCommitTracker.getTimeUpperBound() > 0) {
+      manager.registerGauge(this, registry, () -> "" + softCommitTracker.getTimeUpperBound() + "ms", true, "softAutoCommitMaxTime",
+          getCategory().toString(), scope);
+    }
+    optimizeCommands = manager.meter(this, registry, "optimizes", getCategory().toString(), scope);
+    rollbackCommands = manager.meter(this, registry, "rollbacks", getCategory().toString(), scope);
+    splitCommands = manager.meter(this, registry, "splits", getCategory().toString(), scope);
+    mergeIndexesCommands = manager.meter(this, registry, "merges", getCategory().toString(), scope);
+    expungeDeleteCommands = manager.meter(this, registry, "expungeDeletes", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> numDocsPending.longValue(), true, "docsPending", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> addCommands.longValue(), true, "adds", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> deleteByIdCommands.longValue(), true, "deletesById", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> deleteByQueryCommands.longValue(), true, "deletesByQuery", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> numErrors.longValue(), true, "errors", getCategory().toString(), scope);
+
+    addCommandsCumulative = manager.meter(this, registry, "cumulativeAdds", getCategory().toString(), scope);
+    deleteByIdCommandsCumulative = manager.meter(this, registry, "cumulativeDeletesById", getCategory().toString(), scope);
+    deleteByQueryCommandsCumulative = manager.meter(this, registry, "cumulativeDeletesByQuery", getCategory().toString(), scope);
+    numErrorsCumulative = manager.meter(this, registry, "cumulativeErrors", getCategory().toString(), scope);
   }
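
For orientation: everything registered here lands in a shared Dropwizard registry keyed by the `registry` name, so the same figures can later be read back by metric name. A minimal read-back sketch, assuming an illustrative registry name and dotted metric names (the exact strings depend on how SolrMetricManager composes category, scope and metric name):

    import com.codahale.metrics.Gauge;
    import com.codahale.metrics.Meter;
    import com.codahale.metrics.MetricRegistry;

    // Hypothetical read-back; "solr.core.collection1" and the dotted names
    // below are illustrative assumptions, not taken from this commit.
    MetricRegistry reg = metricManager.registry("solr.core.collection1");
    Meter commits = reg.getMeters().get("UPDATE.updateHandler.commits");
    Gauge<?> pending = reg.getGauges().get("UPDATE.updateHandler.docsPending");
    System.out.println("commits=" + commits.getCount()
        + " docsPending=" + pending.getValue());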
 
   private void deleteAll() throws IOException {
@@ -755,7 +768,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
    */
   @Override
   public void rollback(RollbackUpdateCommand cmd) throws IOException {
-    if (core.getCoreDescriptor().getCoreContainer().isZooKeeperAware()) {
+    if (core.getCoreContainer().isZooKeeperAware()) {
       throw new UnsupportedOperationException("Rollback is currently not supported in SolrCloud mode. (SOLR-4895)");
     }
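
This hunk is one instance of a mechanical change repeated throughout the commit: `core.getCoreDescriptor().getCoreContainer()` collapses to `core.getCoreContainer()`. Presumably SolrCore exposes a direct accessor along these lines; a sketch of the assumed shortcut, not the actual implementation:

    // Assumed convenience accessor on SolrCore: same CoreContainer instance,
    // reached in one call instead of two.
    public CoreContainer getCoreContainer() {
      return coreDescriptor.getCoreContainer();
    }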
 
@@ -811,7 +824,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
   @Override
   public void closeWriter(IndexWriter writer) throws IOException {
 
-    assert TestInjection.injectNonGracefullClose(core.getCoreDescriptor().getCoreContainer());
+    assert TestInjection.injectNonGracefullClose(core.getCoreContainer());
     
     boolean clearRequestInfo = false;
     solrCoreState.getCommitLock().lock();
@@ -951,7 +964,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
 
 
   /////////////////////////////////////////////////////////////////////
-  // SolrInfoMBean stuff: Statistics and Module Info
+  // SolrInfoBean stuff: Statistics and Module Info
   /////////////////////////////////////////////////////////////////////
 
   @Override
@@ -960,70 +973,11 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
   }
 
   @Override
-  public String getVersion() {
-    return SolrCore.version;
-  }
-
-  @Override
   public String getDescription() {
     return "Update handler that efficiently directly updates the on-disk main lucene index";
   }
 
   @Override
-  public String getSource() {
-    return null;
-  }
-
-  @Override
-  public URL[] getDocs() {
-    return null;
-  }
-
-  @Override
-  public NamedList getStatistics() {
-    NamedList lst = new SimpleOrderedMap();
-    lst.add("commits", commitCommands.getCount());
-    if (commitTracker.getDocsUpperBound() > 0) {
-      lst.add("autocommit maxDocs", commitTracker.getDocsUpperBound());
-    }
-    if (commitTracker.getTimeUpperBound() > 0) {
-      lst.add("autocommit maxTime", "" + commitTracker.getTimeUpperBound() + "ms");
-    }
-    lst.add("autocommits", commitTracker.getCommitCount());
-    if (softCommitTracker.getDocsUpperBound() > 0) {
-      lst.add("soft autocommit maxDocs", softCommitTracker.getDocsUpperBound());
-    }
-    if (softCommitTracker.getTimeUpperBound() > 0) {
-      lst.add("soft autocommit maxTime", "" + softCommitTracker.getTimeUpperBound() + "ms");
-    }
-    lst.add("soft autocommits", softCommitTracker.getCommitCount());
-    lst.add("optimizes", optimizeCommands.getCount());
-    lst.add("rollbacks", rollbackCommands.getCount());
-    lst.add("expungeDeletes", expungeDeleteCommands.getCount());
-    lst.add("docsPending", numDocsPending.longValue());
-    // pset.size() not synchronized, but it should be fine to access.
-    // lst.add("deletesPending", pset.size());
-    lst.add("adds", addCommands.longValue());
-    lst.add("deletesById", deleteByIdCommands.longValue());
-    lst.add("deletesByQuery", deleteByQueryCommands.longValue());
-    lst.add("errors", numErrors.longValue());
-    lst.add("cumulative_adds", addCommandsCumulative.getCount());
-    lst.add("cumulative_deletesById", deleteByIdCommandsCumulative.getCount());
-    lst.add("cumulative_deletesByQuery", deleteByQueryCommandsCumulative.getCount());
-    lst.add("cumulative_errors", numErrorsCumulative.getCount());
-    if (this.ulog != null) {
-      lst.add("transaction_logs_total_size", ulog.getTotalLogsSize());
-      lst.add("transaction_logs_total_number", ulog.getTotalLogsNumber());
-    }
-    return lst;
-  }
-
-  @Override
-  public String toString() {
-    return "DirectUpdateHandler2" + getStatistics();
-  }
-  
-  @Override
   public SolrCoreState getSolrCoreState() {
     return solrCoreState;
   }
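
With getStatistics()/toString() gone, per-handler statistics are no longer assembled into a NamedList on demand; the equivalent numbers now come from the metrics registered in initializeMetrics() above. If something like the old view were ever needed again, it could be rebuilt from the surviving fields; an illustrative fragment only:

    // Illustrative only: reassembling a few of the removed statistics from
    // the meter/counter fields that DirectUpdateHandler2 still holds.
    NamedList<Object> lst = new SimpleOrderedMap<>();
    lst.add("commits", commitCommands.getCount());
    lst.add("optimizes", optimizeCommands.getCount());
    lst.add("adds", addCommands.longValue());
    lst.add("cumulative_adds", addCommandsCumulative.getCount());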

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java b/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
index 71e20d9..7bb74d0 100644
--- a/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
+++ b/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
@@ -37,7 +37,7 @@ import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.util.HdfsUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -262,7 +262,7 @@ public class HdfsUpdateLog extends UpdateLog {
     }
 
     // initialize metrics
-    core.getCoreMetricManager().registerMetricProducer(SolrInfoMBean.Category.TLOG.toString(), this);
+    core.getCoreMetricManager().registerMetricProducer(SolrInfoBean.Category.TLOG.toString(), this);
   }
   
   @Override
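
registerMetricProducer() hands the update log to the core's metric manager under the TLOG category, and the manager calls back into the producer to let it create its metrics. The callback signature, as used consistently across this commit:

    // Shape of the producer contract implied by the calls in this patch
    // (simplified sketch; the real interface may carry more than this).
    public interface SolrMetricProducer {
      void initializeMetrics(SolrMetricManager manager, String registry, String scope);
    }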

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/PeerSync.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/PeerSync.java b/solr/core/src/java/org/apache/solr/update/PeerSync.java
index 9470cca..dfadb0c 100644
--- a/solr/core/src/java/org/apache/solr/update/PeerSync.java
+++ b/solr/core/src/java/org/apache/solr/update/PeerSync.java
@@ -43,7 +43,7 @@ import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.handler.component.HttpShardHandlerFactory;
 import org.apache.solr.handler.component.ShardHandler;
 import org.apache.solr.handler.component.ShardHandlerFactory;
@@ -151,25 +151,25 @@ public class PeerSync implements SolrMetricProducer {
     this.cantReachIsSuccess = cantReachIsSuccess;
     this.getNoVersionsIsSuccess = getNoVersionsIsSuccess;
     this.doFingerprint = doFingerprint && !("true".equals(System.getProperty("solr.disableFingerprint")));
-    this.client = core.getCoreDescriptor().getCoreContainer().getUpdateShardHandler().getHttpClient();
+    this.client = core.getCoreContainer().getUpdateShardHandler().getHttpClient();
     this.onlyIfActive = onlyIfActive;
     
     uhandler = core.getUpdateHandler();
     ulog = uhandler.getUpdateLog();
     // TODO: close
-    shardHandlerFactory = (HttpShardHandlerFactory) core.getCoreDescriptor().getCoreContainer().getShardHandlerFactory();
+    shardHandlerFactory = (HttpShardHandlerFactory) core.getCoreContainer().getShardHandlerFactory();
     shardHandler = shardHandlerFactory.getShardHandler(client);
 
-    core.getCoreMetricManager().registerMetricProducer(SolrInfoMBean.Category.REPLICATION.toString(), this);
+    core.getCoreMetricManager().registerMetricProducer(SolrInfoBean.Category.REPLICATION.toString(), this);
   }
 
   public static final String METRIC_SCOPE = "peerSync";
 
   @Override
   public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
-    syncTime = manager.timer(registry, "time", scope, METRIC_SCOPE);
-    syncErrors = manager.counter(registry, "errors", scope, METRIC_SCOPE);
-    syncSkipped = manager.counter(registry, "skipped", scope, METRIC_SCOPE);
+    syncTime = manager.timer(null, registry, "time", scope, METRIC_SCOPE);
+    syncErrors = manager.counter(null, registry, "errors", scope, METRIC_SCOPE);
+    syncSkipped = manager.counter(null, registry, "skipped", scope, METRIC_SCOPE);
   }
 
   /** optional list of updates we had before possibly receiving new updates */
@@ -184,7 +184,7 @@ public class PeerSync implements SolrMetricProducer {
 
   // start of peersync related debug messages.  includes the core name for correlation.
   private String msg() {
-    ZkController zkController = uhandler.core.getCoreDescriptor().getCoreContainer().getZkController();
+    ZkController zkController = uhandler.core.getCoreContainer().getZkController();
 
     String myURL = "";
 
@@ -882,7 +882,7 @@ public class PeerSync implements SolrMetricProducer {
 
   /** Requests and applies recent updates from peers */
   public static void sync(SolrCore core, List<String> replicas, int nUpdates) {
-    ShardHandlerFactory shardHandlerFactory = core.getCoreDescriptor().getCoreContainer().getShardHandlerFactory();
+    ShardHandlerFactory shardHandlerFactory = core.getCoreContainer().getShardHandlerFactory();
 
     ShardHandler shardHandler = shardHandlerFactory.getShardHandler();
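
A note on the PeerSync metrics above: syncTime, syncErrors and syncSkipped are plain Dropwizard primitives, so a sync pass would record into them roughly as follows. The control flow here is a hedged sketch, not code from this commit:

    import com.codahale.metrics.Timer;

    Timer.Context ctx = syncTime.time();     // start the clock for this sync
    try {
      // ... request and apply recent updates from peers ...
    } catch (Exception e) {
      syncErrors.inc();                      // count the failed attempt
      throw e;
    } finally {
      ctx.stop();                            // record elapsed time into syncTime
    }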
    


[20/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermGroupFacetCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermGroupFacetCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermGroupFacetCollector.java
deleted file mode 100644
index cee327c..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermGroupFacetCollector.java
+++ /dev/null
@@ -1,415 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search.grouping.term;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.lucene.index.DocValues;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.index.SortedSetDocValues;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.grouping.GroupFacetCollector;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.SentinelIntSet;
-import org.apache.lucene.util.UnicodeUtil;
-
-/**
- * An implementation of {@link GroupFacetCollector} that computes grouped facets based on the indexed terms
- * from DocValues.
- *
- * @lucene.experimental
- */
-public abstract class TermGroupFacetCollector extends GroupFacetCollector {
-
-  final List<GroupedFacetHit> groupedFacetHits;
-  final SentinelIntSet segmentGroupedFacetHits;
-
-  SortedDocValues groupFieldTermsIndex;
-
-  /**
-   * Factory method for creating the right implementation based on whether the facet field
-   * contains multiple tokens per document.
-   *
-   * @param groupField The group field
-   * @param facetField The facet field
-   * @param facetFieldMultivalued Whether the facet field has multiple tokens per document
-   * @param facetPrefix The facet prefix a facet entry should start with to be included.
-   * @param initialSize The initial allocation size of the internal int set and group facet list which should roughly
-   *                    match the total number of expected unique groups. Be aware that the heap usage is
-   *                    4 bytes * initialSize.
-   * @return <code>TermGroupFacetCollector</code> implementation
-   */
-  public static TermGroupFacetCollector createTermGroupFacetCollector(String groupField,
-                                                                      String facetField,
-                                                                      boolean facetFieldMultivalued,
-                                                                      BytesRef facetPrefix,
-                                                                      int initialSize) {
-    if (facetFieldMultivalued) {
-      return new MV(groupField, facetField, facetPrefix, initialSize);
-    } else {
-      return new SV(groupField, facetField, facetPrefix, initialSize);
-    }
-  }
-
-  TermGroupFacetCollector(String groupField, String facetField, BytesRef facetPrefix, int initialSize) {
-    super(groupField, facetField, facetPrefix);
-    groupedFacetHits = new ArrayList<>(initialSize);
-    segmentGroupedFacetHits = new SentinelIntSet(initialSize, Integer.MIN_VALUE);
-  }
-
-  // Implementation for single valued facet fields.
-  static class SV extends TermGroupFacetCollector {
-
-    private SortedDocValues facetFieldTermsIndex;
-
-    SV(String groupField, String facetField, BytesRef facetPrefix, int initialSize) {
-      super(groupField, facetField, facetPrefix, initialSize);
-    }
-
-    @Override
-    public void collect(int doc) throws IOException {
-      if (doc > facetFieldTermsIndex.docID()) {
-        facetFieldTermsIndex.advance(doc);
-      }
-
-      int facetOrd;
-      if (doc == facetFieldTermsIndex.docID()) {
-        facetOrd = facetFieldTermsIndex.ordValue();
-      } else {
-        facetOrd = -1;
-      }
-      
-      if (facetOrd < startFacetOrd || facetOrd >= endFacetOrd) {
-        return;
-      }
-
-      if (doc > groupFieldTermsIndex.docID()) {
-        groupFieldTermsIndex.advance(doc);
-      }
-
-      int groupOrd;
-      if (doc == groupFieldTermsIndex.docID()) {
-        groupOrd = groupFieldTermsIndex.ordValue();
-      } else {
-        groupOrd = -1;
-      }
-      int segmentGroupedFacetsIndex = groupOrd * (facetFieldTermsIndex.getValueCount()+1) + facetOrd;
-      if (segmentGroupedFacetHits.exists(segmentGroupedFacetsIndex)) {
-        return;
-      }
-
-      segmentTotalCount++;
-      segmentFacetCounts[facetOrd+1]++;
-
-      segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
-
-      BytesRef groupKey;
-      if (groupOrd == -1) {
-        groupKey = null;
-      } else {
-        groupKey = BytesRef.deepCopyOf(groupFieldTermsIndex.lookupOrd(groupOrd));
-      }
-
-      BytesRef facetKey;
-      if (facetOrd == -1) {
-        facetKey = null;
-      } else {
-        facetKey = BytesRef.deepCopyOf(facetFieldTermsIndex.lookupOrd(facetOrd));
-      }
-
-      groupedFacetHits.add(new GroupedFacetHit(groupKey, facetKey));
-    }
-
-    @Override
-    protected void doSetNextReader(LeafReaderContext context) throws IOException {
-      if (segmentFacetCounts != null) {
-        segmentResults.add(createSegmentResult());
-      }
-
-      groupFieldTermsIndex = DocValues.getSorted(context.reader(), groupField);
-      facetFieldTermsIndex = DocValues.getSorted(context.reader(), facetField);
-
-      // 1+ to allow for the -1 "not set":
-      segmentFacetCounts = new int[facetFieldTermsIndex.getValueCount()+1];
-      segmentTotalCount = 0;
-
-      segmentGroupedFacetHits.clear();
-      for (GroupedFacetHit groupedFacetHit : groupedFacetHits) {
-        int facetOrd = groupedFacetHit.facetValue == null ? -1 : facetFieldTermsIndex.lookupTerm(groupedFacetHit.facetValue);
-        if (groupedFacetHit.facetValue != null && facetOrd < 0) {
-          continue;
-        }
-
-        int groupOrd = groupedFacetHit.groupValue == null ? -1 : groupFieldTermsIndex.lookupTerm(groupedFacetHit.groupValue);
-        if (groupedFacetHit.groupValue != null && groupOrd < 0) {
-          continue;
-        }
-
-        int segmentGroupedFacetsIndex = groupOrd * (facetFieldTermsIndex.getValueCount()+1) + facetOrd;
-        segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
-      }
-
-      if (facetPrefix != null) {
-        startFacetOrd = facetFieldTermsIndex.lookupTerm(facetPrefix);
-        if (startFacetOrd < 0) {
-          // Points to the ord one higher than facetPrefix
-          startFacetOrd = -startFacetOrd - 1;
-        }
-        BytesRefBuilder facetEndPrefix = new BytesRefBuilder();
-        facetEndPrefix.append(facetPrefix);
-        facetEndPrefix.append(UnicodeUtil.BIG_TERM);
-        endFacetOrd = facetFieldTermsIndex.lookupTerm(facetEndPrefix.get());
-        assert endFacetOrd < 0;
-        endFacetOrd = -endFacetOrd - 1; // Points to the ord one higher than facetEndPrefix
-      } else {
-        startFacetOrd = -1;
-        endFacetOrd = facetFieldTermsIndex.getValueCount();
-      }
-    }
-
-    @Override
-    protected SegmentResult createSegmentResult() throws IOException {
-      return new SegmentResult(segmentFacetCounts, segmentTotalCount, facetFieldTermsIndex.termsEnum(), startFacetOrd, endFacetOrd);
-    }
-
-    private static class SegmentResult extends GroupFacetCollector.SegmentResult {
-
-      final TermsEnum tenum;
-
-      SegmentResult(int[] counts, int total, TermsEnum tenum, int startFacetOrd, int endFacetOrd) throws IOException {
-        super(counts, total - counts[0], counts[0], endFacetOrd+1);
-        this.tenum = tenum;
-        this.mergePos = startFacetOrd == -1 ? 1 : startFacetOrd+1;
-        if (mergePos < maxTermPos) {
-          assert tenum != null;
-          tenum.seekExact(startFacetOrd == -1 ? 0 : startFacetOrd);
-          mergeTerm = tenum.term();
-        }
-      }
-
-      @Override
-      protected void nextTerm() throws IOException {
-        mergeTerm = tenum.next();
-      }
-    }
-  }
-
-  // Implementation for multi valued facet fields.
-  static class MV extends TermGroupFacetCollector {
-
-    private SortedSetDocValues facetFieldDocTermOrds;
-    private TermsEnum facetOrdTermsEnum;
-    private int facetFieldNumTerms;
-
-    MV(String groupField, String facetField, BytesRef facetPrefix, int initialSize) {
-      super(groupField, facetField, facetPrefix, initialSize);
-    }
-
-    @Override
-    public void collect(int doc) throws IOException {
-      if (doc > groupFieldTermsIndex.docID()) {
-        groupFieldTermsIndex.advance(doc);
-      }
-
-      int groupOrd;
-      if (doc == groupFieldTermsIndex.docID()) {
-        groupOrd = groupFieldTermsIndex.ordValue();
-      } else {
-        groupOrd = -1;
-      }
-      
-      if (facetFieldNumTerms == 0) {
-        int segmentGroupedFacetsIndex = groupOrd * (facetFieldNumTerms + 1);
-        if (facetPrefix != null || segmentGroupedFacetHits.exists(segmentGroupedFacetsIndex)) {
-          return;
-        }
-
-        segmentTotalCount++;
-        segmentFacetCounts[facetFieldNumTerms]++;
-
-        segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
-        BytesRef groupKey;
-        if (groupOrd == -1) {
-          groupKey = null;
-        } else {
-          groupKey = BytesRef.deepCopyOf(groupFieldTermsIndex.lookupOrd(groupOrd));
-        }
-        groupedFacetHits.add(new GroupedFacetHit(groupKey, null));
-        return;
-      }
-
-      if (doc > facetFieldDocTermOrds.docID()) {
-        facetFieldDocTermOrds.advance(doc);
-      }
-      boolean empty = true;
-      if (doc == facetFieldDocTermOrds.docID()) {
-        long ord;
-        while ((ord = facetFieldDocTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
-          process(groupOrd, (int) ord);
-          empty = false;
-        }
-      }
-      
-      if (empty) {
-        process(groupOrd, facetFieldNumTerms); // this facet ord is reserved for docs not containing facet field.
-      }
-    }
-    
-    private void process(int groupOrd, int facetOrd) throws IOException {
-      if (facetOrd < startFacetOrd || facetOrd >= endFacetOrd) {
-        return;
-      }
-
-      int segmentGroupedFacetsIndex = groupOrd * (facetFieldNumTerms + 1) + facetOrd;
-      if (segmentGroupedFacetHits.exists(segmentGroupedFacetsIndex)) {
-        return;
-      }
-
-      segmentTotalCount++;
-      segmentFacetCounts[facetOrd]++;
-
-      segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
-
-      BytesRef groupKey;
-      if (groupOrd == -1) {
-        groupKey = null;
-      } else {
-        groupKey = BytesRef.deepCopyOf(groupFieldTermsIndex.lookupOrd(groupOrd));
-      }
-
-      final BytesRef facetValue;
-      if (facetOrd == facetFieldNumTerms) {
-        facetValue = null;
-      } else {
-        facetValue = BytesRef.deepCopyOf(facetFieldDocTermOrds.lookupOrd(facetOrd));
-      }
-      groupedFacetHits.add(new GroupedFacetHit(groupKey, facetValue));
-    }
-
-    @Override
-    protected void doSetNextReader(LeafReaderContext context) throws IOException {
-      if (segmentFacetCounts != null) {
-        segmentResults.add(createSegmentResult());
-      }
-
-      groupFieldTermsIndex = DocValues.getSorted(context.reader(), groupField);
-      facetFieldDocTermOrds = DocValues.getSortedSet(context.reader(), facetField);
-      facetFieldNumTerms = (int) facetFieldDocTermOrds.getValueCount();
-      if (facetFieldNumTerms == 0) {
-        facetOrdTermsEnum = null;
-      } else {
-        facetOrdTermsEnum = facetFieldDocTermOrds.termsEnum();
-      }
-      // [facetFieldNumTerms() + 1] for all possible facet values and docs not containing facet field
-      segmentFacetCounts = new int[facetFieldNumTerms + 1];
-      segmentTotalCount = 0;
-
-      segmentGroupedFacetHits.clear();
-      for (GroupedFacetHit groupedFacetHit : groupedFacetHits) {
-        int groupOrd = groupedFacetHit.groupValue == null ? -1 : groupFieldTermsIndex.lookupTerm(groupedFacetHit.groupValue);
-        if (groupedFacetHit.groupValue != null && groupOrd < 0) {
-          continue;
-        }
-
-        int facetOrd;
-        if (groupedFacetHit.facetValue != null) {
-          if (facetOrdTermsEnum == null || !facetOrdTermsEnum.seekExact(groupedFacetHit.facetValue)) {
-            continue;
-          }
-          facetOrd = (int) facetOrdTermsEnum.ord();
-        } else {
-          facetOrd = facetFieldNumTerms;
-        }
-
-        // (facetFieldDocTermOrds.numTerms() + 1) for all possible facet values and docs not containing facet field
-        int segmentGroupedFacetsIndex = groupOrd * (facetFieldNumTerms + 1) + facetOrd;
-        segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
-      }
-
-      if (facetPrefix != null) {
-        TermsEnum.SeekStatus seekStatus;
-        if (facetOrdTermsEnum != null) {
-          seekStatus = facetOrdTermsEnum.seekCeil(facetPrefix);
-        } else {
-          seekStatus = TermsEnum.SeekStatus.END;
-        }
-
-        if (seekStatus != TermsEnum.SeekStatus.END) {
-          startFacetOrd = (int) facetOrdTermsEnum.ord();
-        } else {
-          startFacetOrd = 0;
-          endFacetOrd = 0;
-          return;
-        }
-
-        BytesRefBuilder facetEndPrefix = new BytesRefBuilder();
-        facetEndPrefix.append(facetPrefix);
-        facetEndPrefix.append(UnicodeUtil.BIG_TERM);
-        seekStatus = facetOrdTermsEnum.seekCeil(facetEndPrefix.get());
-        if (seekStatus != TermsEnum.SeekStatus.END) {
-          endFacetOrd = (int) facetOrdTermsEnum.ord();
-        } else {
-          endFacetOrd = facetFieldNumTerms; // Don't include null...
-        }
-      } else {
-        startFacetOrd = 0;
-        endFacetOrd = facetFieldNumTerms + 1;
-      }
-    }
-
-    @Override
-    protected SegmentResult createSegmentResult() throws IOException {
-      return new SegmentResult(segmentFacetCounts, segmentTotalCount, facetFieldNumTerms, facetOrdTermsEnum, startFacetOrd, endFacetOrd);
-    }
-
-    private static class SegmentResult extends GroupFacetCollector.SegmentResult {
-
-      final TermsEnum tenum;
-
-      SegmentResult(int[] counts, int total, int missingCountIndex, TermsEnum tenum, int startFacetOrd, int endFacetOrd) throws IOException {
-        super(counts, total - counts[missingCountIndex], counts[missingCountIndex],
-            endFacetOrd == missingCountIndex + 1 ?  missingCountIndex : endFacetOrd);
-        this.tenum = tenum;
-        this.mergePos = startFacetOrd;
-        if (tenum != null) {
-          tenum.seekExact(mergePos);
-          mergeTerm = tenum.term();
-        }
-      }
-
-      @Override
-      protected void nextTerm() throws IOException {
-        mergeTerm = tenum.next();
-      }
-    }
-  }
-}
-
-class GroupedFacetHit {
-
-  final BytesRef groupValue;
-  final BytesRef facetValue;
-
-  GroupedFacetHit(BytesRef groupValue, BytesRef facetValue) {
-    this.groupValue = groupValue;
-    this.facetValue = facetValue;
-  }
-}
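
The deletion above removes this class from the `term` subpackage, while the GroupFacetCollectorTest hunk further down only drops the import and keeps using TermGroupFacetCollector, which suggests the class moved up into org.apache.lucene.search.grouping rather than disappearing. Under that assumption, the factory call documented in the deleted javadoc survives unchanged apart from the package; a sketch:

    import org.apache.lucene.search.grouping.TermGroupFacetCollector; // assumed new home
    import org.apache.lucene.util.BytesRef;

    TermGroupFacetCollector collector =
        TermGroupFacetCollector.createTermGroupFacetCollector(
            "author",             // group field
            "category",           // facet field
            false,                // facet field holds one token per document
            new BytesRef("mus"),  // count only facet values with this prefix
            128);                 // sizing hint: ~expected unique group count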

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermGrouper.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermGrouper.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermGrouper.java
deleted file mode 100644
index 246ee78..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermGrouper.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.lucene.search.grouping.term;
-
-import java.io.IOException;
-import java.util.Collection;
-
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.grouping.AllGroupHeadsCollector;
-import org.apache.lucene.search.grouping.AllGroupsCollector;
-import org.apache.lucene.search.grouping.FirstPassGroupingCollector;
-import org.apache.lucene.search.grouping.SecondPassGroupingCollector;
-import org.apache.lucene.search.grouping.Grouper;
-import org.apache.lucene.search.grouping.SearchGroup;
-import org.apache.lucene.util.BytesRef;
-
-/**
- * Collector factory for grouping by term
- */
-public class TermGrouper extends Grouper<BytesRef> {
-
-  private final String field;
-  private final int initialSize;
-
-  /**
-   * Create a new TermGrouper
-   * @param field the field to group on
-   */
-  public TermGrouper(String field) {
-    this(field, 128);
-  }
-
-  /**
-   * Create a new TermGrouper
-   * @param field       the field to group on
-   * @param initialSize the initial size of various internal data structures
-   */
-  public TermGrouper(String field, int initialSize) {
-    this.field = field;
-    this.initialSize = initialSize;
-  }
-
-  @Override
-  public FirstPassGroupingCollector<BytesRef> getFirstPassCollector(Sort sort, int count) throws IOException {
-    return new TermFirstPassGroupingCollector(field, sort, count);
-  }
-
-  @Override
-  public AllGroupHeadsCollector<BytesRef> getGroupHeadsCollector(Sort sort) {
-    return TermAllGroupHeadsCollector.create(field, sort, initialSize);
-  }
-
-  @Override
-  public AllGroupsCollector<BytesRef> getAllGroupsCollector() {
-    return new TermAllGroupsCollector(field, initialSize);
-  }
-
-  @Override
-  public SecondPassGroupingCollector<BytesRef> getSecondPassCollector(
-      Collection<SearchGroup<BytesRef>> groups, Sort groupSort, Sort withinGroupSort,
-      int maxDocsPerGroup, boolean getScores, boolean getMaxScores, boolean fillSortFields) throws IOException {
-    return new TermSecondPassGroupingCollector(field, groups, groupSort, withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
-  }
-
-
-}
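
With the Grouper factory deleted, the test diffs later in this message show its replacement: collectors are constructed directly around a GroupSelector, one selector type per grouping strategy. A minimal sketch built from the constructors those diffs use:

    // Term-based grouping without the factory: the selector stands in for the
    // Term*Collector subclasses this commit removes.
    FirstPassGroupingCollector<BytesRef> firstPass =
        new FirstPassGroupingCollector<>(new TermGroupSelector("author"),
                                         Sort.RELEVANCE, 10);
    AllGroupsCollector<BytesRef> allGroups =
        new AllGroupsCollector<>(new TermGroupSelector("author"));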

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermSecondPassGroupingCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermSecondPassGroupingCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermSecondPassGroupingCollector.java
deleted file mode 100644
index 75d2210..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermSecondPassGroupingCollector.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search.grouping.term;
-
-import java.io.IOException;
-import java.util.Collection;
-
-import org.apache.lucene.index.DocValues;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.grouping.SecondPassGroupingCollector;
-import org.apache.lucene.search.grouping.SearchGroup;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.SentinelIntSet;
-
-/**
- * Concrete implementation of {@link SecondPassGroupingCollector} that groups based on
- * field values and more specifically uses {@link SortedDocValues}
- * to collect grouped docs.
- *
- * @lucene.experimental
- */
-public class TermSecondPassGroupingCollector extends SecondPassGroupingCollector<BytesRef> {
-
-  private final String groupField;
-  private final SentinelIntSet ordSet;
-
-  private SortedDocValues index;
-
-  @SuppressWarnings({"unchecked", "rawtypes"})
-  public TermSecondPassGroupingCollector(String groupField, Collection<SearchGroup<BytesRef>> groups, Sort groupSort, Sort withinGroupSort,
-                                         int maxDocsPerGroup, boolean getScores, boolean getMaxScores, boolean fillSortFields)
-      throws IOException {
-    super(groups, groupSort, withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
-    this.groupField = groupField;
-    this.ordSet = new SentinelIntSet(groupMap.size(), -2);
-    super.groupDocs = (SearchGroupDocs<BytesRef>[]) new SearchGroupDocs[ordSet.keys.length];
-  }
-
-  @Override
-  protected void doSetNextReader(LeafReaderContext readerContext) throws IOException {
-    super.doSetNextReader(readerContext);
-    index = DocValues.getSorted(readerContext.reader(), groupField);
-
-    // Rebuild ordSet
-    ordSet.clear();
-    for (SearchGroupDocs<BytesRef> group : groupMap.values()) {
-//      System.out.println("  group=" + (group.groupValue == null ? "null" : group.groupValue.utf8ToString()));
-      int ord = group.groupValue == null ? -1 : index.lookupTerm(group.groupValue);
-      if (group.groupValue == null || ord >= 0) {
-        groupDocs[ordSet.put(ord)] = group;
-      }
-    }
-  }
-
-  @Override
-  protected SearchGroupDocs<BytesRef> retrieveGroup(int doc) throws IOException {
-    if (doc > index.docID()) {
-      index.advance(doc);
-    }
-
-    int ord;
-    if (doc == index.docID()) {
-      ord = index.ordValue();
-    } else {
-      ord = -1;
-    }
-    
-    int slot = ordSet.find(ord);
-    if (slot >= 0) {
-      return groupDocs[slot];
-    }
-    return null;
-  }
-
-}
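
The second pass follows the same pattern: TestGrouping below swaps this class for a generic TopGroupsCollector fed by the first pass's own selector. Sketched from the constructor call in that hunk:

    Collection<SearchGroup<BytesRef>> topGroups = firstPass.getTopGroups(0, true);
    TopGroupsCollector<BytesRef> secondPass = new TopGroupsCollector<>(
        firstPass.getGroupSelector(),   // reuse the first pass's selector
        topGroups,
        groupSort, withinGroupSort,
        maxDocsPerGroup,
        true,    // getScores
        true,    // getMaxScores
        true);   // fillSortFields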

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/package-info.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/package-info.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/package-info.java
deleted file mode 100644
index 2732011..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/package-info.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Support for grouping by indexed terms via {@link org.apache.lucene.index.DocValues}.
- */
-package org.apache.lucene.search.grouping.term;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupHeadsCollectorTest.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupHeadsCollectorTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupHeadsCollectorTest.java
index 0c99456..5ab4b5f 100644
--- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupHeadsCollectorTest.java
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupHeadsCollectorTest.java
@@ -49,8 +49,6 @@ import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.grouping.function.FunctionAllGroupHeadsCollector;
-import org.apache.lucene.search.grouping.term.TermAllGroupHeadsCollector;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
@@ -513,19 +511,12 @@ public class AllGroupHeadsCollectorTest extends LuceneTestCase {
 
   @SuppressWarnings({"unchecked","rawtypes"})
   private AllGroupHeadsCollector<?> createRandomCollector(String groupField, Sort sortWithinGroup) {
-    AllGroupHeadsCollector<?> collector;
     if (random().nextBoolean()) {
       ValueSource vs = new BytesRefFieldSource(groupField);
-      collector =  new FunctionAllGroupHeadsCollector(vs, new HashMap<>(), sortWithinGroup);
+      return AllGroupHeadsCollector.newCollector(new ValueSourceGroupSelector(vs, new HashMap<>()), sortWithinGroup);
     } else {
-      collector =  TermAllGroupHeadsCollector.create(groupField, sortWithinGroup);
+      return AllGroupHeadsCollector.newCollector(new TermGroupSelector(groupField), sortWithinGroup);
     }
-
-    if (VERBOSE) {
-      System.out.println("Selected implementation: " + collector.getClass().getSimpleName());
-    }
-
-    return collector;
   }
 
   private void addGroupField(Document doc, String groupField, String value, DocValuesType valueType) {
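
The randomized test now reaches both implementations through one static factory. A hedged usage sketch of that factory; the retrieval call at the end is an assumption based on the collector's established API rather than on this diff:

    AllGroupHeadsCollector<?> headsCollector =
        AllGroupHeadsCollector.newCollector(
            new TermGroupSelector("author"),
            new Sort(new SortField("date", SortField.Type.LONG)));
    searcher.search(new TermQuery(new Term("content", "random")), headsCollector);
    int[] groupHeads = headsCollector.retrieveGroupHeads();  // assumed accessor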

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupsCollectorTest.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupsCollectorTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupsCollectorTest.java
index ab70fad..0d777f6 100644
--- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupsCollectorTest.java
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupsCollectorTest.java
@@ -16,6 +16,8 @@
  */
 package org.apache.lucene.search.grouping;
 
+import java.util.HashMap;
+
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -28,14 +30,10 @@ import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.valuesource.BytesRefFieldSource;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.grouping.function.FunctionAllGroupsCollector;
-import org.apache.lucene.search.grouping.term.TermAllGroupsCollector;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 
-import java.util.HashMap;
-
 public class AllGroupsCollectorTest extends LuceneTestCase {
 
   public void testTotalGroupCount() throws Exception {
@@ -124,19 +122,13 @@ public class AllGroupsCollectorTest extends LuceneTestCase {
   }
 
   private AllGroupsCollector<?> createRandomCollector(String groupField) {
-    AllGroupsCollector<?> selected;
     if (random().nextBoolean()) {
-      selected = new TermAllGroupsCollector(groupField);
-    } else {
-      ValueSource vs = new BytesRefFieldSource(groupField);
-      selected = new FunctionAllGroupsCollector(vs, new HashMap<>());
+      return new AllGroupsCollector<>(new TermGroupSelector(groupField));
     }
-
-    if (VERBOSE) {
-      System.out.println("Selected implementation: " + selected.getClass().getName());
+    else {
+      ValueSource vs = new BytesRefFieldSource(groupField);
+      return new AllGroupsCollector<>(new ValueSourceGroupSelector(vs, new HashMap<>()));
     }
-
-    return selected;
   }
 
 }
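
The function-based flavor goes through the same constructor with a ValueSourceGroupSelector; either way the test only asserts on the group count. A sketch under those assumptions (the MutableValue generic type is inferred from the selector):

    ValueSource vs = new BytesRefFieldSource("author");
    AllGroupsCollector<MutableValue> collector =
        new AllGroupsCollector<>(new ValueSourceGroupSelector(vs, new HashMap<>()));
    searcher.search(new TermQuery(new Term("content", "random")), collector);
    int totalGroups = collector.getGroupCount();  // what testTotalGroupCount checks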

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/test/org/apache/lucene/search/grouping/DistinctValuesCollectorTest.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/DistinctValuesCollectorTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/DistinctValuesCollectorTest.java
index b5d67cf..89d9a6e 100644
--- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/DistinctValuesCollectorTest.java
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/DistinctValuesCollectorTest.java
@@ -44,17 +44,12 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.grouping.function.FunctionDistinctValuesCollector;
-import org.apache.lucene.search.grouping.function.FunctionFirstPassGroupingCollector;
-import org.apache.lucene.search.grouping.term.TermDistinctValuesCollector;
-import org.apache.lucene.search.grouping.term.TermFirstPassGroupingCollector;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.mutable.MutableValue;
 import org.apache.lucene.util.mutable.MutableValueStr;
 
-
 public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
 
   private final static NullComparator nullComparator = new NullComparator();
@@ -126,32 +121,27 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
     IndexSearcher indexSearcher = newSearcher(w.getReader());
     w.close();
 
-    Comparator<DistinctValuesCollector.GroupCount<Comparable<Object>>> cmp = new Comparator<DistinctValuesCollector.GroupCount<Comparable<Object>>>() {
-
-      @Override
-      public int compare(DistinctValuesCollector.GroupCount<Comparable<Object>> groupCount1, DistinctValuesCollector.GroupCount<Comparable<Object>> groupCount2) {
-        if (groupCount1.groupValue == null) {
-          if (groupCount2.groupValue == null) {
-            return 0;
-          }
-          return -1;
-        } else if (groupCount2.groupValue == null) {
-          return 1;
-        } else {
-          return groupCount1.groupValue.compareTo(groupCount2.groupValue);
+    Comparator<DistinctValuesCollector.GroupCount<Comparable<Object>, Comparable<Object>>> cmp = (groupCount1, groupCount2) -> {
+      if (groupCount1.groupValue == null) {
+        if (groupCount2.groupValue == null) {
+          return 0;
         }
+        return -1;
+      } else if (groupCount2.groupValue == null) {
+        return 1;
+      } else {
+        return groupCount1.groupValue.compareTo(groupCount2.groupValue);
       }
-
     };
 
     // === Search for content:random
     FirstPassGroupingCollector<Comparable<Object>> firstCollector = createRandomFirstPassCollector(new Sort(), GROUP_FIELD, 10);
     indexSearcher.search(new TermQuery(new Term("content", "random")), firstCollector);
-    DistinctValuesCollector<Comparable<Object>> distinctValuesCollector
-        = createDistinctCountCollector(firstCollector, GROUP_FIELD, COUNT_FIELD);
+    DistinctValuesCollector<Comparable<Object>, Comparable<Object>> distinctValuesCollector
+        = createDistinctCountCollector(firstCollector, COUNT_FIELD);
     indexSearcher.search(new TermQuery(new Term("content", "random")), distinctValuesCollector);
 
-    List<DistinctValuesCollector.GroupCount<Comparable<Object>>> gcs = distinctValuesCollector.getGroups();
+    List<DistinctValuesCollector.GroupCount<Comparable<Object>, Comparable<Object>>> gcs = distinctValuesCollector.getGroups();
     Collections.sort(gcs, cmp);
     assertEquals(4, gcs.size());
 
@@ -180,7 +170,7 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
     // === Search for content:some
     firstCollector = createRandomFirstPassCollector(new Sort(), GROUP_FIELD, 10);
     indexSearcher.search(new TermQuery(new Term("content", "some")), firstCollector);
-    distinctValuesCollector = createDistinctCountCollector(firstCollector, GROUP_FIELD, COUNT_FIELD);
+    distinctValuesCollector = createDistinctCountCollector(firstCollector, COUNT_FIELD);
     indexSearcher.search(new TermQuery(new Term("content", "some")), distinctValuesCollector);
 
     gcs = distinctValuesCollector.getGroups();
@@ -207,7 +197,7 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
      // === Search for content:blob
     firstCollector = createRandomFirstPassCollector(new Sort(), GROUP_FIELD, 10);
     indexSearcher.search(new TermQuery(new Term("content", "blob")), firstCollector);
-    distinctValuesCollector = createDistinctCountCollector(firstCollector, GROUP_FIELD, COUNT_FIELD);
+    distinctValuesCollector = createDistinctCountCollector(firstCollector, COUNT_FIELD);
     indexSearcher.search(new TermQuery(new Term("content", "blob")), distinctValuesCollector);
 
     gcs = distinctValuesCollector.getGroups();
@@ -240,15 +230,15 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
         Sort groupSort = new Sort(new SortField("id", SortField.Type.STRING));
         int topN = 1 + random.nextInt(10);
 
-        List<DistinctValuesCollector.GroupCount<Comparable<?>>> expectedResult = createExpectedResult(context, term, groupSort, topN);
+        List<DistinctValuesCollector.GroupCount<Comparable<Object>, Comparable<Object>>> expectedResult = createExpectedResult(context, term, groupSort, topN);
 
-        FirstPassGroupingCollector<Comparable<?>> firstCollector = createRandomFirstPassCollector(groupSort, GROUP_FIELD, topN);
+        FirstPassGroupingCollector<Comparable<Object>> firstCollector = createRandomFirstPassCollector(groupSort, GROUP_FIELD, topN);
         searcher.search(new TermQuery(new Term("content", term)), firstCollector);
-        DistinctValuesCollector<Comparable<?>> distinctValuesCollector
-            = createDistinctCountCollector(firstCollector, GROUP_FIELD, COUNT_FIELD);
+        DistinctValuesCollector<Comparable<Object>, Comparable<Object>> distinctValuesCollector
+            = createDistinctCountCollector(firstCollector, COUNT_FIELD);
         searcher.search(new TermQuery(new Term("content", term)), distinctValuesCollector);
         @SuppressWarnings("unchecked")
-        List<DistinctValuesCollector.GroupCount<Comparable<?>>> actualResult = distinctValuesCollector.getGroups();
+        List<DistinctValuesCollector.GroupCount<Comparable<Object>, Comparable<Object>>> actualResult = distinctValuesCollector.getGroups();
 
         if (VERBOSE) {
           System.out.println("Index iter=" + indexIter);
@@ -265,8 +255,8 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
 
         assertEquals(expectedResult.size(), actualResult.size());
         for (int i = 0; i < expectedResult.size(); i++) {
-          DistinctValuesCollector.GroupCount<Comparable<?>> expected = expectedResult.get(i);
-          DistinctValuesCollector.GroupCount<Comparable<?>> actual = actualResult.get(i);
+          DistinctValuesCollector.GroupCount<Comparable<Object>, Comparable<Object>> expected = expectedResult.get(i);
+          DistinctValuesCollector.GroupCount<Comparable<Object>, Comparable<Object>> actual = actualResult.get(i);
           assertValues(expected.groupValue, actual.groupValue);
           assertEquals(expected.uniqueValues.size(), actual.uniqueValues.size());
           List<Comparable<?>> expectedUniqueValues = new ArrayList<>(expected.uniqueValues);
@@ -283,9 +273,9 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
     }
   }
 
-  private void printGroups(List<? extends DistinctValuesCollector.GroupCount<Comparable<?>>> results) {
+  private void printGroups(List<DistinctValuesCollector.GroupCount<Comparable<Object>, Comparable<Object>>> results) {
     for(int i=0;i<results.size();i++) {
-      DistinctValuesCollector.GroupCount<Comparable<?>> group = results.get(i);
+      DistinctValuesCollector.GroupCount<Comparable<Object>, Comparable<Object>> group = results.get(i);
       Object gv = group.groupValue;
       if (gv instanceof BytesRef) {
         System.out.println(i + ": groupValue=" + ((BytesRef) gv).utf8ToString());
@@ -350,15 +340,16 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
   }
 
   @SuppressWarnings({"unchecked","rawtypes"})
-  private <T extends Comparable> DistinctValuesCollector<T> createDistinctCountCollector(FirstPassGroupingCollector<T> firstPassGroupingCollector,
-                                                                                                                             String groupField,
-                                                                                                                             String countField) throws IOException {
-    Random random = random();
+  private <T extends Comparable<Object>, R extends Comparable<Object>> DistinctValuesCollector<T, R> createDistinctCountCollector(FirstPassGroupingCollector<T> firstPassGroupingCollector,
+                                                                                         String countField) throws IOException {
     Collection<SearchGroup<T>> searchGroups = firstPassGroupingCollector.getTopGroups(0, false);
-    if (FunctionFirstPassGroupingCollector.class.isAssignableFrom(firstPassGroupingCollector.getClass())) {
-      return (DistinctValuesCollector) new FunctionDistinctValuesCollector(new HashMap<>(), new BytesRefFieldSource(groupField), new BytesRefFieldSource(countField), (Collection) searchGroups);
+    GroupSelector<T> selector = firstPassGroupingCollector.getGroupSelector();
+    if (ValueSourceGroupSelector.class.isAssignableFrom(selector.getClass())) {
+      GroupSelector gs = new ValueSourceGroupSelector(new BytesRefFieldSource(countField), new HashMap<>());
+      return new DistinctValuesCollector<>(selector, searchGroups, gs);
     } else {
-      return (DistinctValuesCollector) new TermDistinctValuesCollector(groupField, countField, (Collection) searchGroups);
+      GroupSelector ts = new TermGroupSelector(countField);
+      return new DistinctValuesCollector<>(selector, searchGroups, ts);
     }
   }
 
@@ -366,21 +357,14 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
   private <T> FirstPassGroupingCollector<T> createRandomFirstPassCollector(Sort groupSort, String groupField, int topNGroups) throws IOException {
     Random random = random();
     if (random.nextBoolean()) {
-      return (FirstPassGroupingCollector<T>) new FunctionFirstPassGroupingCollector(new BytesRefFieldSource(groupField), new HashMap<>(), groupSort, topNGroups);
+      return (FirstPassGroupingCollector<T>) new FirstPassGroupingCollector<>(new ValueSourceGroupSelector(new BytesRefFieldSource(groupField), new HashMap<>()), groupSort, topNGroups);
     } else {
-      return (FirstPassGroupingCollector<T>) new TermFirstPassGroupingCollector(groupField, groupSort, topNGroups);
+      return (FirstPassGroupingCollector<T>) new FirstPassGroupingCollector<>(new TermGroupSelector(groupField), groupSort, topNGroups);
     }
   }
 
   @SuppressWarnings({"unchecked","rawtypes"})
-  private List<DistinctValuesCollector.GroupCount<Comparable<?>>> createExpectedResult(IndexContext context, String term, Sort groupSort, int topN) {
-    class GroupCount extends DistinctValuesCollector.GroupCount<BytesRef> {
-      GroupCount(BytesRef groupValue, Collection<BytesRef> uniqueValues) {
-        super(groupValue);
-        this.uniqueValues.addAll(uniqueValues);
-      }
-    }
-
+  private List<DistinctValuesCollector.GroupCount<Comparable<Object>, Comparable<Object>>> createExpectedResult(IndexContext context, String term, Sort groupSort, int topN) {
     List result = new ArrayList();
     Map<String, Set<String>> groupCounts = context.searchTermToGroupCounts.get(term);
     int i = 0;
@@ -392,7 +376,7 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
       for (String val : groupCounts.get(group)) {
         uniqueValues.add(val != null ? new BytesRef(val) : null);
       }
-      result.add(new GroupCount(group != null ? new BytesRef(group) : null, uniqueValues));
+      result.add(new DistinctValuesCollector.GroupCount(group != null ? new BytesRef(group) : null, uniqueValues));
     }
     return result;
   }
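
Worth spelling out the shape of the distinct-count API this test now exercises: the collector takes two selectors, one defining the groups and one defining the values counted within each group, plus the search groups from the first pass. A sketch assembled from the constructor calls above (field names are illustrative):

    GroupSelector<BytesRef> groupSelector = new TermGroupSelector("author");
    GroupSelector<BytesRef> countSelector = new TermGroupSelector("publisher");
    DistinctValuesCollector<BytesRef, BytesRef> distinct =
        new DistinctValuesCollector<>(groupSelector, searchGroups, countSelector);
    searcher.search(new TermQuery(new Term("content", "random")), distinct);
    // each resulting GroupCount pairs a group value with its set of unique
    // values from the count field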

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupFacetCollectorTest.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupFacetCollectorTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupFacetCollectorTest.java
index c590502..968ce5a 100644
--- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupFacetCollectorTest.java
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupFacetCollectorTest.java
@@ -44,7 +44,6 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.grouping.term.TermGroupFacetCollector;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.TestUtil;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
index f079b85..b322fba 100644
--- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
@@ -58,12 +58,6 @@ import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.Weight;
-import org.apache.lucene.search.grouping.function.FunctionAllGroupsCollector;
-import org.apache.lucene.search.grouping.function.FunctionFirstPassGroupingCollector;
-import org.apache.lucene.search.grouping.function.FunctionSecondPassGroupingCollector;
-import org.apache.lucene.search.grouping.term.TermAllGroupsCollector;
-import org.apache.lucene.search.grouping.term.TermFirstPassGroupingCollector;
-import org.apache.lucene.search.grouping.term.TermSecondPassGroupingCollector;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
@@ -147,7 +141,7 @@ public class TestGrouping extends LuceneTestCase {
     final FirstPassGroupingCollector<?> c1 = createRandomFirstPassCollector(groupField, groupSort, 10);
     indexSearcher.search(new TermQuery(new Term("content", "random")), c1);
 
-    final SecondPassGroupingCollector<?> c2 = createSecondPassCollector(c1, groupField, groupSort, Sort.RELEVANCE, 0, 5, true, true, true);
+    final TopGroupsCollector<?> c2 = createSecondPassCollector(c1, groupSort, Sort.RELEVANCE, 0, 5, true, true, true);
     indexSearcher.search(new TermQuery(new Term("content", "random")), c2);
 
     final TopGroups<?> groups = c2.getTopGroups(0);
@@ -196,31 +190,26 @@ public class TestGrouping extends LuceneTestCase {
   }
 
   private FirstPassGroupingCollector<?> createRandomFirstPassCollector(String groupField, Sort groupSort, int topDocs) throws IOException {
-    FirstPassGroupingCollector<?> selected;
     if (random().nextBoolean()) {
       ValueSource vs = new BytesRefFieldSource(groupField);
-      selected = new FunctionFirstPassGroupingCollector(vs, new HashMap<>(), groupSort, topDocs);
+      return new FirstPassGroupingCollector<>(new ValueSourceGroupSelector(vs, new HashMap<>()), groupSort, topDocs);
     } else {
-      selected = new TermFirstPassGroupingCollector(groupField, groupSort, topDocs);
+      return new FirstPassGroupingCollector<>(new TermGroupSelector(groupField), groupSort, topDocs);
     }
-    if (VERBOSE) {
-      System.out.println("Selected implementation: " + selected.getClass().getName());
-    }
-    return selected;
   }
 
   private FirstPassGroupingCollector<?> createFirstPassCollector(String groupField, Sort groupSort, int topDocs, FirstPassGroupingCollector<?> firstPassGroupingCollector) throws IOException {
-    if (TermFirstPassGroupingCollector.class.isAssignableFrom(firstPassGroupingCollector.getClass())) {
+    GroupSelector<?> selector = firstPassGroupingCollector.getGroupSelector();
+    if (TermGroupSelector.class.isAssignableFrom(selector.getClass())) {
       ValueSource vs = new BytesRefFieldSource(groupField);
-      return new FunctionFirstPassGroupingCollector(vs, new HashMap<>(), groupSort, topDocs);
+      return new FirstPassGroupingCollector<>(new ValueSourceGroupSelector(vs, new HashMap<>()), groupSort, topDocs);
     } else {
-      return new TermFirstPassGroupingCollector(groupField, groupSort, topDocs);
+      return new FirstPassGroupingCollector<>(new TermGroupSelector(groupField), groupSort, topDocs);
     }
   }
 
   @SuppressWarnings({"unchecked","rawtypes"})
-  private <T> SecondPassGroupingCollector<T> createSecondPassCollector(FirstPassGroupingCollector firstPassGroupingCollector,
-                                                                       String groupField,
+  private <T> TopGroupsCollector<T> createSecondPassCollector(FirstPassGroupingCollector firstPassGroupingCollector,
                                                                        Sort groupSort,
                                                                        Sort sortWithinGroup,
                                                                        int groupOffset,
@@ -229,19 +218,13 @@ public class TestGrouping extends LuceneTestCase {
                                                                        boolean getMaxScores,
                                                                        boolean fillSortFields) throws IOException {
 
-    if (TermFirstPassGroupingCollector.class.isAssignableFrom(firstPassGroupingCollector.getClass())) {
-      Collection<SearchGroup<BytesRef>> searchGroups = firstPassGroupingCollector.getTopGroups(groupOffset, fillSortFields);
-      return (SecondPassGroupingCollector) new TermSecondPassGroupingCollector(groupField, searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup , getScores, getMaxScores, fillSortFields);
-    } else {
-      ValueSource vs = new BytesRefFieldSource(groupField);
-      Collection<SearchGroup<MutableValue>> searchGroups = firstPassGroupingCollector.getTopGroups(groupOffset, fillSortFields);
-      return (SecondPassGroupingCollector) new FunctionSecondPassGroupingCollector(searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup, getScores, getMaxScores, fillSortFields, vs, new HashMap());
-    }
+    Collection<SearchGroup<T>> searchGroups = firstPassGroupingCollector.getTopGroups(groupOffset, fillSortFields);
+    return new TopGroupsCollector<>(firstPassGroupingCollector.getGroupSelector(), searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
   }
 
   // Basically converts searchGroups from MutableValue to BytesRef if grouping by ValueSource
   @SuppressWarnings("unchecked")
-  private SecondPassGroupingCollector<?> createSecondPassCollector(FirstPassGroupingCollector<?> firstPassGroupingCollector,
+  private TopGroupsCollector<?> createSecondPassCollector(FirstPassGroupingCollector<?> firstPassGroupingCollector,
                                                                    String groupField,
                                                                    Collection<SearchGroup<BytesRef>> searchGroups,
                                                                    Sort groupSort,
@@ -250,8 +233,9 @@ public class TestGrouping extends LuceneTestCase {
                                                                    boolean getScores,
                                                                    boolean getMaxScores,
                                                                    boolean fillSortFields) throws IOException {
-    if (firstPassGroupingCollector.getClass().isAssignableFrom(TermFirstPassGroupingCollector.class)) {
-      return new TermSecondPassGroupingCollector(groupField, searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup , getScores, getMaxScores, fillSortFields);
+    if (firstPassGroupingCollector.getGroupSelector().getClass().isAssignableFrom(TermGroupSelector.class)) {
+      GroupSelector<BytesRef> selector = (GroupSelector<BytesRef>) firstPassGroupingCollector.getGroupSelector();
+      return new TopGroupsCollector<>(selector, searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup , getScores, getMaxScores, fillSortFields);
     } else {
       ValueSource vs = new BytesRefFieldSource(groupField);
       List<SearchGroup<MutableValue>> mvalSearchGroups = new ArrayList<>(searchGroups.size());
@@ -267,19 +251,14 @@ public class TestGrouping extends LuceneTestCase {
         sg.sortValues = mergedTopGroup.sortValues;
         mvalSearchGroups.add(sg);
       }
-
-      return new FunctionSecondPassGroupingCollector(mvalSearchGroups, groupSort, sortWithinGroup, maxDocsPerGroup, getScores, getMaxScores, fillSortFields, vs, new HashMap<>());
+      ValueSourceGroupSelector selector = new ValueSourceGroupSelector(vs, new HashMap<>());
+      return new TopGroupsCollector<>(selector, mvalSearchGroups, groupSort, sortWithinGroup, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
     }
   }
 
   private AllGroupsCollector<?> createAllGroupsCollector(FirstPassGroupingCollector<?> firstPassGroupingCollector,
                                                          String groupField) {
-    if (firstPassGroupingCollector.getClass().isAssignableFrom(TermFirstPassGroupingCollector.class)) {
-      return new TermAllGroupsCollector(groupField);
-    } else {
-      ValueSource vs = new BytesRefFieldSource(groupField);
-      return new FunctionAllGroupsCollector(vs, new HashMap<>());
-    }
+    return new AllGroupsCollector<>(firstPassGroupingCollector.getGroupSelector());
   }
 
   private void compareGroupValue(String expected, GroupDocs<?> group) {
@@ -306,10 +285,12 @@ public class TestGrouping extends LuceneTestCase {
   }
 
   private Collection<SearchGroup<BytesRef>> getSearchGroups(FirstPassGroupingCollector<?> c, int groupOffset, boolean fillFields) throws IOException {
-    if (TermFirstPassGroupingCollector.class.isAssignableFrom(c.getClass())) {
-      return ((TermFirstPassGroupingCollector) c).getTopGroups(groupOffset, fillFields);
-    } else if (FunctionFirstPassGroupingCollector.class.isAssignableFrom(c.getClass())) {
-      Collection<SearchGroup<MutableValue>> mutableValueGroups = ((FunctionFirstPassGroupingCollector) c).getTopGroups(groupOffset, fillFields);
+    if (TermGroupSelector.class.isAssignableFrom(c.getGroupSelector().getClass())) {
+      FirstPassGroupingCollector<BytesRef> collector = (FirstPassGroupingCollector<BytesRef>) c;
+      return collector.getTopGroups(groupOffset, fillFields);
+    } else if (ValueSourceGroupSelector.class.isAssignableFrom(c.getGroupSelector().getClass())) {
+      FirstPassGroupingCollector<MutableValue> collector = (FirstPassGroupingCollector<MutableValue>) c;
+      Collection<SearchGroup<MutableValue>> mutableValueGroups = collector.getTopGroups(groupOffset, fillFields);
       if (mutableValueGroups == null) {
         return null;
       }
@@ -328,11 +309,13 @@ public class TestGrouping extends LuceneTestCase {
   }
 
   @SuppressWarnings({"unchecked", "rawtypes"})
-  private TopGroups<BytesRef> getTopGroups(SecondPassGroupingCollector c, int withinGroupOffset) {
-    if (c.getClass().isAssignableFrom(TermSecondPassGroupingCollector.class)) {
-      return ((TermSecondPassGroupingCollector) c).getTopGroups(withinGroupOffset);
-    } else if (c.getClass().isAssignableFrom(FunctionSecondPassGroupingCollector.class)) {
-      TopGroups<MutableValue> mvalTopGroups = ((FunctionSecondPassGroupingCollector) c).getTopGroups(withinGroupOffset);
+  private TopGroups<BytesRef> getTopGroups(TopGroupsCollector c, int withinGroupOffset) {
+    if (c.getGroupSelector().getClass().isAssignableFrom(TermGroupSelector.class)) {
+      TopGroupsCollector<BytesRef> collector = (TopGroupsCollector<BytesRef>) c;
+      return collector.getTopGroups(withinGroupOffset);
+    } else if (c.getGroupSelector().getClass().isAssignableFrom(ValueSourceGroupSelector.class)) {
+      TopGroupsCollector<MutableValue> collector = (TopGroupsCollector<MutableValue>) c;
+      TopGroups<MutableValue> mvalTopGroups = collector.getTopGroups(withinGroupOffset);
       List<GroupDocs<BytesRef>> groups = new ArrayList<>(mvalTopGroups.groups.length);
       for (GroupDocs<MutableValue> mvalGd : mvalTopGroups.groups) {
         BytesRef groupValue = mvalGd.groupValue.exists() ? ((MutableValueStr) mvalGd.groupValue).value.get() : null;
@@ -952,8 +935,8 @@ public class TestGrouping extends LuceneTestCase {
         // Get 1st pass top groups using shards
         
         final TopGroups<BytesRef> topGroupsShards = searchShards(s, shards.subSearchers, query, groupSort, docSort,
-            groupOffset, topNGroups, docOffset, docsPerGroup, getScores, getMaxScores, true, false);
-        final SecondPassGroupingCollector<?> c2;
+            groupOffset, topNGroups, docOffset, docsPerGroup, getScores, getMaxScores, true, true);
+        final TopGroupsCollector<?> c2;
         if (topGroups != null) {
           
           if (VERBOSE) {
@@ -963,7 +946,7 @@ public class TestGrouping extends LuceneTestCase {
             }
           }
           
-          c2 = createSecondPassCollector(c1, groupField, groupSort, docSort, groupOffset, docOffset + docsPerGroup, getScores, getMaxScores, fillFields);
+          c2 = createSecondPassCollector(c1, groupSort, docSort, groupOffset, docOffset + docsPerGroup, getScores, getMaxScores, fillFields);
           if (doCache) {
             if (cCache.isCached()) {
               if (VERBOSE) {
@@ -1050,13 +1033,13 @@ public class TestGrouping extends LuceneTestCase {
         
         final boolean needsScores = getScores || getMaxScores || docSort == null;
         final BlockGroupingCollector c3 = new BlockGroupingCollector(groupSort, groupOffset+topNGroups, needsScores, sBlocks.createNormalizedWeight(lastDocInBlock, false));
-        final TermAllGroupsCollector allGroupsCollector2;
+        final AllGroupsCollector<BytesRef> allGroupsCollector2;
         final Collector c4;
         if (doAllGroups) {
           // NOTE: must be "group" and not "group_dv"
           // (groupField) because we didn't index doc
           // values in the block index:
-          allGroupsCollector2 = new TermAllGroupsCollector("group");
+          allGroupsCollector2 = new AllGroupsCollector<>(new TermGroupSelector("group"));
           c4 = MultiCollector.wrap(c3, allGroupsCollector2);
         } else {
           allGroupsCollector2 = null;
@@ -1223,7 +1206,7 @@ public class TestGrouping extends LuceneTestCase {
       @SuppressWarnings({"unchecked","rawtypes"})
       final TopGroups<BytesRef>[] shardTopGroups = new TopGroups[subSearchers.length];
       for(int shardIDX=0;shardIDX<subSearchers.length;shardIDX++) {
-        final SecondPassGroupingCollector<?> secondPassCollector = createSecondPassCollector(firstPassGroupingCollectors.get(shardIDX),
+        final TopGroupsCollector<?> secondPassCollector = createSecondPassCollector(firstPassGroupingCollectors.get(shardIDX),
             groupField, mergedTopGroups, groupSort, docSort, docOffset + topNDocs, getScores, getMaxScores, true);
         subSearchers[shardIDX].search(w, secondPassCollector);
         shardTopGroups[shardIDX] = getTopGroups(secondPassCollector, 0);
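
The refactor above replaces the Term*/Function* collector pairs with generic
collectors parameterized by a GroupSelector. As a rough sketch of the
resulting API (the "author" field and the query are invented for
illustration, not taken from this patch):

import java.io.IOException;
import java.util.Collection;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.grouping.FirstPassGroupingCollector;
import org.apache.lucene.search.grouping.SearchGroup;
import org.apache.lucene.search.grouping.TermGroupSelector;
import org.apache.lucene.search.grouping.TopGroups;
import org.apache.lucene.search.grouping.TopGroupsCollector;
import org.apache.lucene.util.BytesRef;

class GroupingSketch {
  static TopGroups<BytesRef> topGroupsByAuthor(IndexSearcher searcher) throws IOException {
    Query query = new TermQuery(new Term("content", "random"));
    // first pass: find the top 10 group values for the "author" field
    FirstPassGroupingCollector<BytesRef> c1 =
        new FirstPassGroupingCollector<>(new TermGroupSelector("author"), Sort.RELEVANCE, 10);
    searcher.search(query, c1);
    Collection<SearchGroup<BytesRef>> topGroups = c1.getTopGroups(0, true); // null if nothing matched
    // second pass: collect the top 5 documents within each surviving group
    TopGroupsCollector<BytesRef> c2 = new TopGroupsCollector<>(
        c1.getGroupSelector(), topGroups, Sort.RELEVANCE, Sort.INDEXORDER, 5, true, false, true);
    searcher.search(query, c2);
    return c2.getTopGroups(0);
  }
}

Grouping by a function value works the same way, swapping TermGroupSelector
for a ValueSourceGroupSelector.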

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/MultiTermHighlighting.java
----------------------------------------------------------------------
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/MultiTermHighlighting.java b/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/MultiTermHighlighting.java
index 89403d5..15f4bdd 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/MultiTermHighlighting.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/MultiTermHighlighting.java
@@ -28,12 +28,14 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.search.AutomatonQuery;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.DisjunctionMaxQuery;
 import org.apache.lucene.search.FuzzyQuery;
 import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermRangeQuery;
+import org.apache.lucene.search.spans.SpanBoostQuery;
 import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
 import org.apache.lucene.search.spans.SpanNearQuery;
 import org.apache.lucene.search.spans.SpanNotQuery;
@@ -64,6 +66,8 @@ class MultiTermHighlighting {
                                                         Predicate<String> fieldMatcher,
                                                         boolean lookInSpan,
                                                         Function<Query, Collection<Query>> preRewriteFunc) {
+    // TODO Lucene needs a Query visitor API!  LUCENE-3041
+
     List<CharacterRunAutomaton> list = new ArrayList<>();
     Collection<Query> customSubQueries = preRewriteFunc.apply(query);
     if (customSubQueries != null) {
@@ -79,6 +83,9 @@ class MultiTermHighlighting {
     } else if (query instanceof ConstantScoreQuery) {
       list.addAll(Arrays.asList(extractAutomata(((ConstantScoreQuery) query).getQuery(), fieldMatcher, lookInSpan,
           preRewriteFunc)));
+    } else if (query instanceof BoostQuery) {
+      list.addAll(Arrays.asList(extractAutomata(((BoostQuery)query).getQuery(), fieldMatcher, lookInSpan,
+          preRewriteFunc)));
     } else if (query instanceof DisjunctionMaxQuery) {
       for (Query sub : ((DisjunctionMaxQuery) query).getDisjuncts()) {
         list.addAll(Arrays.asList(extractAutomata(sub, fieldMatcher, lookInSpan, preRewriteFunc)));
@@ -97,6 +104,9 @@ class MultiTermHighlighting {
     } else if (lookInSpan && query instanceof SpanPositionCheckQuery) {
       list.addAll(Arrays.asList(extractAutomata(((SpanPositionCheckQuery) query).getMatch(), fieldMatcher, lookInSpan,
           preRewriteFunc)));
+    } else if (lookInSpan && query instanceof SpanBoostQuery) {
+      list.addAll(Arrays.asList(extractAutomata(((SpanBoostQuery) query).getQuery(), fieldMatcher, lookInSpan,
+          preRewriteFunc)));
     } else if (lookInSpan && query instanceof SpanMultiTermQueryWrapper) {
       list.addAll(Arrays.asList(extractAutomata(((SpanMultiTermQueryWrapper<?>) query).getWrappedQuery(),
           fieldMatcher, lookInSpan, preRewriteFunc)));
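
A rough sketch of what the new branches enable (the searcher, analyzer, and
"body" field are assumptions for illustration): a multi-term query wrapped in
a BoostQuery now still contributes automata, so its matches get highlighted
instead of being silently skipped.

import java.io.IOException;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.uhighlight.UnifiedHighlighter;

class BoostedHighlightSketch {
  static String[] highlightBoostedPrefix(IndexSearcher searcher, Analyzer analyzer) throws IOException {
    // extractAutomata() now unwraps the BoostQuery and finds the prefix query inside
    Query query = new BoostQuery(new PrefixQuery(new Term("body", "te")), 2.0f);
    TopDocs topDocs = searcher.search(query, 10);
    UnifiedHighlighter highlighter = new UnifiedHighlighter(searcher, analyzer);
    return highlighter.highlight("body", query, topDocs);
  }
}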

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/highlighter/src/test/org/apache/lucene/search/uhighlight/TestUnifiedHighlighterMTQ.java
----------------------------------------------------------------------
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/uhighlight/TestUnifiedHighlighterMTQ.java b/lucene/highlighter/src/test/org/apache/lucene/search/uhighlight/TestUnifiedHighlighterMTQ.java
index 4a4b7ed..57f174f 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/uhighlight/TestUnifiedHighlighterMTQ.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/uhighlight/TestUnifiedHighlighterMTQ.java
@@ -38,6 +38,7 @@ import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.DisjunctionMaxQuery;
 import org.apache.lucene.search.FuzzyQuery;
@@ -52,6 +53,7 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.WildcardQuery;
+import org.apache.lucene.search.spans.SpanBoostQuery;
 import org.apache.lucene.search.spans.SpanFirstQuery;
 import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
 import org.apache.lucene.search.spans.SpanNearQuery;
@@ -163,7 +165,8 @@ public class TestUnifiedHighlighterMTQ extends LuceneTestCase {
 
     IndexSearcher searcher = newSearcher(ir);
     UnifiedHighlighter highlighter = new UnifiedHighlighter(searcher, indexAnalyzer);
-    Query query = new PrefixQuery(new Term("body", "te"));
+    // wrap in a BoostQuery to verify that the highlighter also looks inside it
+    Query query = new BoostQuery(new PrefixQuery(new Term("body", "te")), 2.0f);
     TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
     assertEquals(2, topDocs.totalHits);
     String snippets[] = highlighter.highlight("body", query, topDocs);
@@ -522,7 +525,9 @@ public class TestUnifiedHighlighterMTQ extends LuceneTestCase {
 
     IndexSearcher searcher = newSearcher(ir);
     UnifiedHighlighter highlighter = new UnifiedHighlighter(searcher, indexAnalyzer);
-    Query query = new SpanMultiTermQueryWrapper<>(new WildcardQuery(new Term("body", "te*")));
+    // wrap in a SpanBoostQuery to verify that the highlighter also looks inside it
+    Query query = new SpanBoostQuery(
+        new SpanMultiTermQueryWrapper<>(new WildcardQuery(new Term("body", "te*"))), 2.0f);
     TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
     assertEquals(2, topDocs.totalHits);
     String snippets[] = highlighter.highlight("body", query, topDocs);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/ivy-versions.properties
----------------------------------------------------------------------
diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties
index 6bd3f8d..86f8cc1 100644
--- a/lucene/ivy-versions.properties
+++ b/lucene/ivy-versions.properties
@@ -265,5 +265,9 @@ org.slf4j.version = 1.7.7
 
 /org.tukaani/xz = 1.5
 /rome/rome = 1.0
+
+ua.net.nlp.morfologik-ukrainian-search.version = 3.7.5
+/ua.net.nlp/morfologik-ukrainian-search = ${ua.net.nlp.morfologik-ukrainian-search.version}
+
 /xerces/xercesImpl = 2.9.1
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/licenses/morfologik-ukrainian-search-3.7.5.jar.sha1
----------------------------------------------------------------------
diff --git a/lucene/licenses/morfologik-ukrainian-search-3.7.5.jar.sha1 b/lucene/licenses/morfologik-ukrainian-search-3.7.5.jar.sha1
new file mode 100644
index 0000000..8794e71
--- /dev/null
+++ b/lucene/licenses/morfologik-ukrainian-search-3.7.5.jar.sha1
@@ -0,0 +1 @@
+2b8c8fbd740164d220ca7d18605b8b2092e163e9

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/licenses/morfologik-ukrainian-search-LICENSE-ASL.txt
----------------------------------------------------------------------
diff --git a/lucene/licenses/morfologik-ukrainian-search-LICENSE-ASL.txt b/lucene/licenses/morfologik-ukrainian-search-LICENSE-ASL.txt
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/lucene/licenses/morfologik-ukrainian-search-LICENSE-ASL.txt
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/licenses/morfologik-ukrainian-search-NOTICE.txt
----------------------------------------------------------------------
diff --git a/lucene/licenses/morfologik-ukrainian-search-NOTICE.txt b/lucene/licenses/morfologik-ukrainian-search-NOTICE.txt
new file mode 100644
index 0000000..df3fa1d
--- /dev/null
+++ b/lucene/licenses/morfologik-ukrainian-search-NOTICE.txt
@@ -0,0 +1,6 @@
+morfologik-ukrainian-search is a POS tag dictionary in the morfologik format, adjusted for searching.
+It is part of the dict_uk project (https://github.com/brown-uk/dict_uk).
+
+Note: to better fit the full-text search model, this dictionary has all word forms in lower case but keeps lemmas for proper nouns in upper case.
+
+Licensed under the Apache License 2.0.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
----------------------------------------------------------------------
diff --git a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
index 4bd72e9..a1f2b07 100644
--- a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
+++ b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
@@ -1217,7 +1217,7 @@ public class MemoryIndex {
     @Override
     public PointValues getPointValues(String fieldName) {
       Info info = fields.get(fieldName);
-      if (info.pointValues == null) {
+      if (info == null || info.pointValues == null) {
         return null;
       }
       return new MemoryIndexPointValues(info);
@@ -1529,6 +1529,7 @@ public class MemoryIndex {
 
       MemoryIndexPointValues(Info info) {
         this.info = Objects.requireNonNull(info);
+        Objects.requireNonNull(info.pointValues, "Field does not have points");
       }
 
       @Override
@@ -1548,12 +1549,7 @@ public class MemoryIndex {
 
       @Override
       public byte[] getMinPackedValue() throws IOException {
-        BytesRef[] values = info.pointValues;
-        if (values != null) {
-          return info.minPackedValue;
-        } else {
-          return null;
-        }
+        return info.minPackedValue;
       }
 
       @Override
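
The effect of the null check, as a minimal sketch (field names invented): a
MemoryIndex reader now answers point-values lookups for absent fields, or for
fields indexed without points, with null rather than a NullPointerException.

import java.io.IOException;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.IndexSearcher;

public class MemoryIndexPointsDemo {
  public static void main(String[] args) throws IOException {
    MemoryIndex mi = new MemoryIndex();
    mi.addField("body", "quick brown fox", new StandardAnalyzer());
    IndexSearcher searcher = mi.createSearcher();
    LeafReader reader = searcher.getIndexReader().leaves().get(0).reader();
    PointValues noPoints = reader.getPointValues("body");          // field exists but has no points
    PointValues missing = reader.getPointValues("no_such_field");  // field does not exist
    System.out.println(noPoints + " / " + missing);                // prints: null / null
  }
}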

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndex.java
----------------------------------------------------------------------
diff --git a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndex.java b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndex.java
index f34f30c..1e20f30 100644
--- a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndex.java
+++ b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndex.java
@@ -40,6 +40,7 @@ import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
+import org.apache.lucene.document.StoredField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.BinaryDocValues;
@@ -422,6 +423,17 @@ public class TestMemoryIndex extends LuceneTestCase {
     }
   }
 
+  public void testMissingPoints() throws IOException {
+    Document doc = new Document();
+    doc.add(new StoredField("field", 42));
+    MemoryIndex mi = MemoryIndex.fromDocument(doc, analyzer);
+    IndexSearcher indexSearcher = mi.createSearcher();
+    // field that exists but does not have points
+    assertNull(indexSearcher.getIndexReader().leaves().get(0).reader().getPointValues("field"));
+    // field that does not exist
+    assertNull(indexSearcher.getIndexReader().leaves().get(0).reader().getPointValues("some_missing_field"));
+  }
+
   public void testPointValuesDoNotAffectPositionsOrOffset() throws Exception {
     MemoryIndex mi = new MemoryIndex(true, true);
     mi.addField(new TextField("text", "quick brown fox", Field.Store.NO), analyzer);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/misc/src/java/org/apache/lucene/document/InetAddressRange.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/java/org/apache/lucene/document/InetAddressRange.java b/lucene/misc/src/java/org/apache/lucene/document/InetAddressRange.java
index 5fa1fb9..84f0d6b 100644
--- a/lucene/misc/src/java/org/apache/lucene/document/InetAddressRange.java
+++ b/lucene/misc/src/java/org/apache/lucene/document/InetAddressRange.java
@@ -68,9 +68,6 @@ public class InetAddressRange extends Field {
    * @param max range max value; defined as an {@code InetAddress}
    */
   public void setRangeValues(InetAddress min, InetAddress max) {
-    if (StringHelper.compare(BYTES, min.getAddress(), 0, max.getAddress(), 0) > 0) {
-      throw new IllegalArgumentException("min value cannot be greater than max value for range field (name=" + name + ")");
-    }
     final byte[] bytes;
     if (fieldsData == null) {
       bytes = new byte[BYTES*2];
@@ -83,8 +80,15 @@ public class InetAddressRange extends Field {
 
   /** encode the min/max range into the provided byte array */
   private static void encode(final InetAddress min, final InetAddress max, final byte[] bytes) {
-    System.arraycopy(InetAddressPoint.encode(min), 0, bytes, 0, BYTES);
-    System.arraycopy(InetAddressPoint.encode(max), 0, bytes, BYTES, BYTES);
+    // encode min and max value (consistent w/ InetAddressPoint encoding)
+    final byte[] minEncoded = InetAddressPoint.encode(min);
+    final byte[] maxEncoded = InetAddressPoint.encode(max);
+    // reject inverted ranges: min must not be greater than max
+    if (StringHelper.compare(BYTES, minEncoded, 0, maxEncoded, 0) > 0) {
+      throw new IllegalArgumentException("min value cannot be greater than max value for InetAddressRange field");
+    }
+    System.arraycopy(minEncoded, 0, bytes, 0, BYTES);
+    System.arraycopy(maxEncoded, 0, bytes, BYTES, BYTES);
   }
 
   /** encode the min/max range and return the byte array */
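
Behavior is unchanged from the caller's perspective; the ordering check just
fires on the encoded form now. A minimal sketch (field name and addresses
invented):

import java.net.InetAddress;

import org.apache.lucene.document.InetAddressRange;

public class InetAddressRangeDemo {
  public static void main(String[] args) throws Exception {
    InetAddress lo = InetAddress.getByName("10.0.0.1");
    InetAddress hi = InetAddress.getByName("10.0.0.255");
    InetAddressRange range = new InetAddressRange("ipRange", lo, hi); // valid range
    try {
      range.setRangeValues(hi, lo); // min > max: rejected from inside encode()
    } catch (IllegalArgumentException expected) {
      System.out.println(expected.getMessage());
    }
  }
}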

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java b/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
index f07793a..1942734 100644
--- a/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
+++ b/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
@@ -154,7 +154,7 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
         }
         
         @Override
-        public long longValue() {
+        public long longValue() throws IOException {
           // Keys are always expressed as a long so we obtain the
           // ordinal for our String-based artist name here
           return sdv.ordValue();


[22/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java b/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java
index fa22320..d273057 100644
--- a/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java
+++ b/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java
@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Locale;
+import java.util.stream.Collectors;
 
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.store.ChecksumIndexInput;
@@ -242,17 +243,22 @@ public class OfflineSorter {
     sortInfo = new SortInfo();
     sortInfo.totalTime = System.currentTimeMillis();
 
-    List<String> segments = new ArrayList<>();
+    List<PartitionAndCount> segments = new ArrayList<>();
     int[] levelCounts = new int[1];
 
     // So we can remove any partially written temp files on exception:
     TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(dir);
 
     boolean success = false;
+    boolean[] isExhausted = new boolean[1];
     try (ByteSequencesReader is = getReader(dir.openChecksumInput(inputFileName, IOContext.READONCE), inputFileName)) {
-      int lineCount;
-      while ((lineCount = readPartition(is)) > 0) {
-        segments.add(sortPartition(trackingDir));
+      while (isExhausted[0] == false) {
+        int lineCount = readPartition(is, isExhausted);
+        if (lineCount == 0) {
+          assert isExhausted[0];
+          break;
+        }
+        segments.add(sortPartition(trackingDir, lineCount));
         sortInfo.tempMergeFiles++;
         sortInfo.lineCount += lineCount;
         levelCounts[0]++;
@@ -286,7 +292,7 @@ public class OfflineSorter {
           result = out.getName();
         }
       } else {
-        result = segments.get(0);
+        result = segments.get(0).fileName;
       }
 
       // We should be explicitly removing all intermediate files ourselves unless there is an exception:
@@ -308,10 +314,10 @@ public class OfflineSorter {
   }
 
   /** Sort a single partition in-memory. */
-  protected String sortPartition(TrackingDirectoryWrapper trackingDir) throws IOException {
+  protected PartitionAndCount sortPartition(TrackingDirectoryWrapper trackingDir, int lineCount) throws IOException {
 
     try (IndexOutput tempFile = trackingDir.createTempOutput(tempFileNamePrefix, "sort", IOContext.DEFAULT);
-         ByteSequencesWriter out = getWriter(tempFile);) {
+         ByteSequencesWriter out = getWriter(tempFile, lineCount);) {
       
       BytesRef spare;
 
@@ -319,17 +325,21 @@ public class OfflineSorter {
       BytesRefIterator iter = buffer.iterator(comparator);
       sortInfo.sortTime += System.currentTimeMillis() - start;
 
+      int count = 0;
       while ((spare = iter.next()) != null) {
         assert spare.length <= Short.MAX_VALUE;
         out.write(spare);
+        count++;
       }
+
+      assert count == lineCount;
       
       // Clean up the buffer for the next partition.
       buffer.clear();
 
       CodecUtil.writeFooter(out.out);
 
-      return tempFile.getName();
+      return new PartitionAndCount(lineCount, tempFile.getName());
     }
   }
 
@@ -342,16 +352,21 @@ public class OfflineSorter {
   }
 
   /** Merge the most recent {@code maxTempFile} partitions into a new partition. */
-  void mergePartitions(Directory trackingDir, List<String> segments) throws IOException {
+  void mergePartitions(Directory trackingDir, List<PartitionAndCount> segments) throws IOException {
     long start = System.currentTimeMillis();
 
-    List<String> segmentsToMerge;
+    List<PartitionAndCount> segmentsToMerge;
     if (segments.size() > maxTempFiles) {
       segmentsToMerge = segments.subList(segments.size() - maxTempFiles, segments.size());
     } else {
       segmentsToMerge = segments;
     }
 
+    long totalCount = 0;
+    for (PartitionAndCount segment : segmentsToMerge) {
+      totalCount += segment.count;
+    }
+
     PriorityQueue<FileAndTop> queue = new PriorityQueue<FileAndTop>(segmentsToMerge.size()) {
       @Override
       protected boolean lessThan(FileAndTop a, FileAndTop b) {
@@ -363,13 +378,13 @@ public class OfflineSorter {
 
     String newSegmentName = null;
 
-    try (ByteSequencesWriter writer = getWriter(trackingDir.createTempOutput(tempFileNamePrefix, "sort", IOContext.DEFAULT))) {
+    try (ByteSequencesWriter writer = getWriter(trackingDir.createTempOutput(tempFileNamePrefix, "sort", IOContext.DEFAULT), totalCount)) {
 
       newSegmentName = writer.out.getName();
       
       // Open streams and read the top for each file
       for (int i = 0; i < segmentsToMerge.size(); i++) {
-        streams[i] = getReader(dir.openChecksumInput(segmentsToMerge.get(i), IOContext.READONCE), segmentsToMerge.get(i));
+        streams[i] = getReader(dir.openChecksumInput(segmentsToMerge.get(i).fileName, IOContext.READONCE), segmentsToMerge.get(i).fileName);
         BytesRef item = null;
         try {
           item = streams[i].next();
@@ -412,16 +427,16 @@ public class OfflineSorter {
       IOUtils.close(streams);
     }
 
-    IOUtils.deleteFiles(trackingDir, segmentsToMerge);
+    IOUtils.deleteFiles(trackingDir, segmentsToMerge.stream().map(segment -> segment.fileName).collect(Collectors.toList()));
 
     segmentsToMerge.clear();
-    segments.add(newSegmentName);
+    segments.add(new PartitionAndCount(totalCount, newSegmentName));
 
     sortInfo.tempMergeFiles++;
   }
 
-  /** Read in a single partition of data */
-  int readPartition(ByteSequencesReader reader) throws IOException {
+  /** Read in a single partition of data, setting isExhausted[0] to true if there are no more items. */
+  int readPartition(ByteSequencesReader reader, boolean[] isExhausted) throws IOException {
     long start = System.currentTimeMillis();
     if (valueLength != -1) {
       int limit = ramBufferSize.bytes / valueLength;
@@ -433,6 +448,7 @@ public class OfflineSorter {
           verifyChecksum(t, reader);
         }
         if (item == null) {
+          isExhausted[0] = true;
           break;
         }
         buffer.append(item);
@@ -446,6 +462,7 @@ public class OfflineSorter {
           verifyChecksum(t, reader);
         }
         if (item == null) {
+          isExhausted[0] = true;
           break;
         }
         buffer.append(item);
@@ -471,7 +488,7 @@ public class OfflineSorter {
   }
 
   /** Subclasses can override to change how byte sequences are written to disk. */
-  protected ByteSequencesWriter getWriter(IndexOutput out) throws IOException {
+  protected ByteSequencesWriter getWriter(IndexOutput out, long itemCount) throws IOException {
     return new ByteSequencesWriter(out);
   }
 
@@ -587,5 +604,15 @@ public class OfflineSorter {
   /** Returns the comparator in use to sort entries */
   public Comparator<BytesRef> getComparator() {
     return comparator;
-  }  
+  }
+
+  private static class PartitionAndCount {
+    final long count;
+    final String fileName;
+
+    public PartitionAndCount(long count, String fileName) {
+      this.count = count;
+      this.fileName = fileName;
+    }
+  }
 }
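
Driving the sorter is unchanged; internally each partition now carries its
line count so merges can pass a total on to getWriter(). A minimal sketch
(path and values invented):

import java.nio.file.Paths;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.OfflineSorter;

public class OfflineSorterDemo {
  public static void main(String[] args) throws Exception {
    try (Directory dir = FSDirectory.open(Paths.get("/tmp/sort-demo"))) {
      IndexOutput unsorted = dir.createTempOutput("demo", "tmp", IOContext.DEFAULT);
      try (OfflineSorter.ByteSequencesWriter w = new OfflineSorter.ByteSequencesWriter(unsorted)) {
        w.write(new BytesRef("banana"));
        w.write(new BytesRef("apple"));
        w.write(new BytesRef("cherry"));
        CodecUtil.writeFooter(unsorted);
      }
      String sorted = new OfflineSorter(dir, "demo").sort(unsorted.getName());
      System.out.println("sorted file: " + sorted); // entries now in byte order
    }
  }
}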

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/util/Version.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/Version.java b/lucene/core/src/java/org/apache/lucene/util/Version.java
index da6d653..f6e6adc 100644
--- a/lucene/core/src/java/org/apache/lucene/util/Version.java
+++ b/lucene/core/src/java/org/apache/lucene/util/Version.java
@@ -102,6 +102,13 @@ public final class Version {
   public static final Version LUCENE_6_5_0 = new Version(6, 5, 0);
 
   /**
+   * Match settings and bugs in Lucene's 6.5.1 release.
+   * @deprecated Use latest
+   */
+  @Deprecated
+  public static final Version LUCENE_6_5_1 = new Version(6, 5, 1);
+
+  /**
    * Match settings and bugs in Lucene's 6.6.0 release.
    * @deprecated Use latest
    */

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
index eeb40fa..8a2356b 100644
--- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java
@@ -888,7 +888,7 @@ public class BKDWriter implements Closeable {
 
           /** We write/read fixed-byte-width file that {@link OfflinePointReader} can read. */
           @Override
-          protected ByteSequencesWriter getWriter(IndexOutput out) {
+          protected ByteSequencesWriter getWriter(IndexOutput out, long count) {
             return new ByteSequencesWriter(out) {
               @Override
               public void write(byte[] bytes, int off, int len) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java b/lucene/core/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java
index 2cc9274..6abbc2b 100644
--- a/lucene/core/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java
+++ b/lucene/core/src/test/org/apache/lucene/analysis/standard/TestStandardAnalyzer.java
@@ -393,4 +393,27 @@ public class TestStandardAnalyzer extends BaseTokenStreamTestCase {
     Analyzer a = new StandardAnalyzer();
    assertEquals(new BytesRef("\"\\à3[]()! cz@"), a.normalize("dummy", "\"\\À3[]()! Cz@"));
   }
+
+  public void testMaxTokenLengthDefault() throws Exception {
+    StandardAnalyzer a = new StandardAnalyzer();
+
+    StringBuilder bToken = new StringBuilder();
+    // exact max length:
+    for(int i=0;i<StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH;i++) {
+      bToken.append('b');
+    }
+
+    String bString = bToken.toString();
+    // first bString is exact max default length; next one is 1 too long
+    String input = "x " + bString + " " + bString + "b";
+    assertAnalyzesTo(a, input, new String[] {"x", bString, bString, "b"});
+    a.close();
+  }
+
+  public void testMaxTokenLengthNonDefault() throws Exception {
+    StandardAnalyzer a = new StandardAnalyzer();
+    a.setMaxTokenLength(5);
+    assertAnalyzesTo(a, "ab cd toolong xy z", new String[]{"ab", "cd", "toolo", "ng", "xy", "z"});
+    a.close();
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java
index 20088a5..b99666e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexReaderClose.java
@@ -73,7 +73,7 @@ public class TestIndexReaderClose extends LuceneTestCase {
             reader.getReaderCacheHelper().addClosedListener(new FaultyListener());
           } else {
             count.incrementAndGet();
-            reader.getReaderCacheHelper().addClosedListener(new CountListener(count));
+            reader.getReaderCacheHelper().addClosedListener(new CountListener(count, reader.getReaderCacheHelper().getKey()));
           }
       }
       if (!faultySet && !throwOnClose) {
@@ -123,7 +123,7 @@ public class TestIndexReaderClose extends LuceneTestCase {
     AtomicInteger counter = new AtomicInteger(numListeners);
 
     for (int i = 0; i < numListeners; ++i) {
-      CountCoreListener listener = new CountCoreListener(counter, leafReader.getCoreCacheHelper().getKey());
+      CountListener listener = new CountListener(counter, leafReader.getCoreCacheHelper().getKey());
       listeners.add(listener);
       leafReader.getCoreCacheHelper().addClosedListener(listener);
     }
@@ -141,12 +141,12 @@ public class TestIndexReaderClose extends LuceneTestCase {
     w.w.getDirectory().close();
   }
 
-  private static final class CountCoreListener implements IndexReader.ClosedListener {
+  private static final class CountListener implements IndexReader.ClosedListener {
 
     private final AtomicInteger count;
     private final Object coreCacheKey;
 
-    public CountCoreListener(AtomicInteger count, Object coreCacheKey) {
+    public CountListener(AtomicInteger count, Object coreCacheKey) {
       this.count = count;
       this.coreCacheKey = coreCacheKey;
     }
@@ -159,25 +159,33 @@ public class TestIndexReaderClose extends LuceneTestCase {
 
   }
 
-  private static final class CountListener implements IndexReader.ClosedListener  {
-    private final AtomicInteger count;
-
-    public CountListener(AtomicInteger count) {
-      this.count = count;
-    }
+  private static final class FaultyListener implements IndexReader.ClosedListener {
 
     @Override
     public void onClose(IndexReader.CacheKey cacheKey) {
-      count.decrementAndGet();
+      throw new IllegalStateException("GRRRRRRRRRRRR!");
     }
   }
 
-  private static final class FaultyListener implements IndexReader.ClosedListener {
+  public void testRegisterListenerOnClosedReader() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
+    w.addDocument(new Document());
+    DirectoryReader r = DirectoryReader.open(w);
+    w.close();
 
-    @Override
-    public void onClose(IndexReader.CacheKey cacheKey) {
-      throw new IllegalStateException("GRRRRRRRRRRRR!");
-    }
+    // The reader is open, everything should work
+    r.getReaderCacheHelper().addClosedListener(key -> {});
+    r.leaves().get(0).reader().getReaderCacheHelper().addClosedListener(key -> {});
+    r.leaves().get(0).reader().getCoreCacheHelper().addClosedListener(key -> {});
+
+    // But now we close
+    r.close();
+    expectThrows(AlreadyClosedException.class, () -> r.getReaderCacheHelper().addClosedListener(key -> {}));
+    expectThrows(AlreadyClosedException.class, () -> r.leaves().get(0).reader().getReaderCacheHelper().addClosedListener(key -> {}));
+    expectThrows(AlreadyClosedException.class, () -> r.leaves().get(0).reader().getCoreCacheHelper().addClosedListener(key -> {}));
+
+    dir.close();
   }
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/test/org/apache/lucene/util/TestByteBlockPool.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/util/TestByteBlockPool.java b/lucene/core/src/test/org/apache/lucene/util/TestByteBlockPool.java
index df73687..475f716 100644
--- a/lucene/core/src/test/org/apache/lucene/util/TestByteBlockPool.java
+++ b/lucene/core/src/test/org/apache/lucene/util/TestByteBlockPool.java
@@ -18,6 +18,7 @@ package org.apache.lucene.util;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 public class TestByteBlockPool extends LuceneTestCase {
@@ -34,8 +35,7 @@ public class TestByteBlockPool extends LuceneTestCase {
       final int numValues = atLeast(100);
       BytesRefBuilder ref = new BytesRefBuilder();
       for (int i = 0; i < numValues; i++) {
-        final String value = TestUtil.randomRealisticUnicodeString(random(),
-            maxLength);
+        final String value = TestUtil.randomRealisticUnicodeString(random(), maxLength);
         list.add(new BytesRef(value));
         ref.copyChars(value);
         pool.append(ref.get());
@@ -76,5 +76,33 @@ public class TestByteBlockPool extends LuceneTestCase {
         pool.nextBuffer(); // prepare for next iter
       }
     }
-  } 
+  }
+
+  public void testLargeRandomBlocks() throws IOException {
+    Counter bytesUsed = Counter.newCounter();
+    ByteBlockPool pool = new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(bytesUsed));
+    pool.nextBuffer();
+
+    List<byte[]> items = new ArrayList<>();
+    for (int i=0;i<100;i++) {
+      int size;
+      if (random().nextBoolean()) {
+        size = TestUtil.nextInt(random(), 100, 1000);
+      } else {
+        size = TestUtil.nextInt(random(), 50000, 100000);
+      }
+      byte[] bytes = new byte[size];
+      random().nextBytes(bytes);
+      items.add(bytes);
+      pool.append(new BytesRef(bytes));
+    }
+
+    long position = 0;
+    for (byte[] expected : items) {
+      byte[] actual = new byte[expected.length];
+      pool.readBytes(position, actual, 0, actual.length);
+      assertTrue(Arrays.equals(expected, actual));
+      position += expected.length;
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/test/org/apache/lucene/util/TestOfflineSorter.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/util/TestOfflineSorter.java b/lucene/core/src/test/org/apache/lucene/util/TestOfflineSorter.java
index 49ed110..839f103 100644
--- a/lucene/core/src/test/org/apache/lucene/util/TestOfflineSorter.java
+++ b/lucene/core/src/test/org/apache/lucene/util/TestOfflineSorter.java
@@ -28,6 +28,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.store.ChecksumIndexInput;
 import org.apache.lucene.store.CorruptingIndexOutput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FilterDirectory;
@@ -455,6 +456,47 @@ public class TestOfflineSorter extends LuceneTestCase {
     dir.close();
   }
 
+  // OfflineSorter should not call my ByteSequencesReader.next() again after it already returned null:
+  public void testOverNexting() throws Exception {
+    Directory dir = newDirectory();
+    IndexOutput out = dir.createTempOutput("unsorted", "tmp", IOContext.DEFAULT);
+    try (ByteSequencesWriter w = new OfflineSorter.ByteSequencesWriter(out)) {
+      byte[] bytes = new byte[Integer.BYTES];
+      random().nextBytes(bytes);
+      w.write(bytes);
+      CodecUtil.writeFooter(out);
+    }
+
+    new OfflineSorter(dir, "foo", OfflineSorter.DEFAULT_COMPARATOR, BufferSize.megabytes(4), OfflineSorter.MAX_TEMPFILES, Integer.BYTES) {
+      @Override
+      protected ByteSequencesReader getReader(ChecksumIndexInput in, String name) throws IOException {
+        ByteSequencesReader other = super.getReader(in, name);
+
+        return new ByteSequencesReader(in, name) {
+
+          private boolean alreadyEnded;
+              
+          @Override
+          public BytesRef next() throws IOException {
+            // if we returned null already, OfflineSorter should not call next() again
+            assertFalse(alreadyEnded);
+            BytesRef result = other.next();
+            if (result == null) {
+              alreadyEnded = true;
+            }
+            return result;
+          }
+
+          @Override
+          public void close() throws IOException {
+            other.close();
+          }
+        };
+      }
+    }.sort(out.getName());
+    dir.close();
+  }
+
   public void testInvalidFixedLength() throws Exception {
     IllegalArgumentException e;
     e = expectThrows(IllegalArgumentException.class,
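
The override above only customizes getReader(); for context, a sketch of the plain sort flow it wraps, using the same writer, constructor arguments, and test helpers as testOverNexting. The assumption that sort(...) returns the name of the sorted temp file matches its use here.

    Directory dir = newDirectory();
    IndexOutput out = dir.createTempOutput("unsorted", "tmp", IOContext.DEFAULT);
    try (ByteSequencesWriter w = new OfflineSorter.ByteSequencesWriter(out)) {
      byte[] bytes = new byte[Integer.BYTES];
      random().nextBytes(bytes);
      w.write(bytes);             // each write produces one length-prefixed sequence
      CodecUtil.writeFooter(out); // the footer lets the sorter verify checksums
    }
    String sortedName = new OfflineSorter(dir, "sorted", OfflineSorter.DEFAULT_COMPARATOR,
        BufferSize.megabytes(4), OfflineSorter.MAX_TEMPFILES, Integer.BYTES).sort(out.getName());
    dir.close();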

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/AllGroupHeadsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AllGroupHeadsCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AllGroupHeadsCollector.java
index b5fbdc3..503b952 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AllGroupHeadsCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AllGroupHeadsCollector.java
@@ -18,27 +18,62 @@ package org.apache.lucene.search.grouping;
 
 import java.io.IOException;
 import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
 
 import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.LeafFieldComparator;
+import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.SimpleCollector;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
 import org.apache.lucene.util.FixedBitSet;
 
 /**
- * This collector specializes in collecting the most relevant document (group head) for each group that match the query.
+ * This collector specializes in collecting the most relevant document (group head) for each
+ * group that matches the query.
+ *
+ * Clients should create new collectors by calling {@link #newCollector(GroupSelector, Sort)}
  *
  * @lucene.experimental
  */
 @SuppressWarnings({"unchecked","rawtypes"})
 public abstract class AllGroupHeadsCollector<T> extends SimpleCollector {
 
+  private final GroupSelector<T> groupSelector;
+  protected final Sort sort;
+
   protected final int[] reversed;
   protected final int compIDXEnd;
-  protected final TemporalResult temporalResult;
 
-  protected AllGroupHeadsCollector(int numberOfSorts) {
-    this.reversed = new int[numberOfSorts];
-    this.compIDXEnd = numberOfSorts - 1;
-    temporalResult = new TemporalResult();
+  protected Map<T, GroupHead<T>> heads = new HashMap<>();
+
+  protected LeafReaderContext context;
+  protected Scorer scorer;
+
+  /**
+   * Create a new AllGroupHeadsCollector based on the type of within-group Sort required
+   * @param selector a GroupSelector to define the groups
+   * @param sort     the within-group sort to use to choose the group head document
+   * @param <T>      the group value type
+   */
+  public static <T> AllGroupHeadsCollector<T> newCollector(GroupSelector<T> selector, Sort sort) {
+    if (sort.equals(Sort.RELEVANCE))
+      return new ScoringGroupHeadsCollector<>(selector, sort);
+    return new SortingGroupHeadsCollector<>(selector, sort);
+  }
+
+  private AllGroupHeadsCollector(GroupSelector<T> selector, Sort sort) {
+    this.groupSelector = selector;
+    this.sort = sort;
+    this.reversed = new int[sort.getSort().length];
+    final SortField[] sortFields = sort.getSort();
+    for (int i = 0; i < sortFields.length; i++) {
+      reversed[i] = sortFields[i].getReverse() ? -1 : 1;
+    }
+    this.compIDXEnd = this.reversed.length - 1;
   }
 
   /**
@@ -79,34 +114,27 @@ public abstract class AllGroupHeadsCollector<T> extends SimpleCollector {
   }
 
   /**
-   * Returns the group head and puts it into {@link #temporalResult}.
-   * If the group head wasn't encountered before then it will be added to the collected group heads.
-   * <p>
-   * The {@link TemporalResult#stop} property will be <code>true</code> if the group head wasn't encountered before
-   * otherwise <code>false</code>.
-   *
-   * @param doc The document to retrieve the group head for.
-   * @throws IOException If I/O related errors occur
-   */
-  protected abstract void retrieveGroupHeadAndAddIfNotExist(int doc) throws IOException;
-
-  /**
    * Returns the collected group heads.
    * Subsequent calls should return the same group heads.
    *
    * @return the collected group heads
    */
-  protected abstract Collection<? extends GroupHead<T>> getCollectedGroupHeads();
+  protected Collection<? extends GroupHead<T>> getCollectedGroupHeads() {
+    return heads.values();
+  }
 
   @Override
   public void collect(int doc) throws IOException {
-    retrieveGroupHeadAndAddIfNotExist(doc);
-    if (temporalResult.stop) {
+    groupSelector.advanceTo(doc);
+    T groupValue = groupSelector.currentValue();
+    if (heads.containsKey(groupValue) == false) {
+      groupValue = groupSelector.copyValue();
+      heads.put(groupValue, newGroupHead(doc, groupValue, context, scorer));
       return;
     }
-    GroupHead<T> groupHead = temporalResult.groupHead;
 
-    // Ok now we need to check if the current doc is more relevant then current doc for this group
+    GroupHead<T> groupHead = heads.get(groupValue);
+    // Ok now we need to check if the current doc is more relevant than top doc for this group
     for (int compIDX = 0; ; compIDX++) {
       final int c = reversed[compIDX] * groupHead.compare(compIDX, doc);
       if (c < 0) {
@@ -125,18 +153,34 @@ public abstract class AllGroupHeadsCollector<T> extends SimpleCollector {
     groupHead.updateDocHead(doc);
   }
 
-  /**
-   * Contains the result of group head retrieval.
-   * To prevent new object creations of this class for every collect.
-   */
-  protected class TemporalResult {
+  @Override
+  public boolean needsScores() {
+    return sort.needsScores();
+  }
 
-    public GroupHead<T> groupHead;
-    public boolean stop;
+  @Override
+  protected void doSetNextReader(LeafReaderContext context) throws IOException {
+    groupSelector.setNextReader(context);
+    this.context = context;
+    for (GroupHead<T> head : heads.values()) {
+      head.setNextReader(context);
+    }
+  }
 
+  @Override
+  public void setScorer(Scorer scorer) throws IOException {
+    this.scorer = scorer;
+    for (GroupHead<T> head : heads.values()) {
+      head.setScorer(scorer);
+    }
   }
 
   /**
+   * Create a new GroupHead for the given group value, initialized with a doc, context and scorer
+   */
+  protected abstract GroupHead<T> newGroupHead(int doc, T value, LeafReaderContext context, Scorer scorer) throws IOException;
+
+  /**
    * Represents a group head. A group head is the most relevant document for a particular group.
   * The relevance is usually based on the sort.
    *
@@ -147,12 +191,30 @@ public abstract class AllGroupHeadsCollector<T> extends SimpleCollector {
     public final T groupValue;
     public int doc;
 
-    protected GroupHead(T groupValue, int doc) {
+    protected int docBase;
+
+    /**
+     * Create a new GroupHead for the given value
+     */
+    protected GroupHead(T groupValue, int doc, int docBase) {
       this.groupValue = groupValue;
-      this.doc = doc;
+      this.doc = doc + docBase;
+      this.docBase = docBase;
+    }
+
+    /**
+     * Called for each segment
+     */
+    protected void setNextReader(LeafReaderContext ctx) throws IOException {
+      this.docBase = ctx.docBase;
     }
 
     /**
+     * Called for each segment
+     */
+    protected abstract void setScorer(Scorer scorer) throws IOException;
+
+    /**
      * Compares the specified document for a specified comparator against the current most relevant document.
      *
      * @param compIDX The comparator index of the specified comparator.
@@ -173,4 +235,117 @@ public abstract class AllGroupHeadsCollector<T> extends SimpleCollector {
 
   }
 
+  /**
+   * General implementation using a {@link FieldComparator} to select the group head
+   */
+  private static class SortingGroupHeadsCollector<T> extends AllGroupHeadsCollector<T> {
+
+    protected SortingGroupHeadsCollector(GroupSelector<T> selector, Sort sort) {
+      super(selector, sort);
+    }
+
+    @Override
+    protected GroupHead<T> newGroupHead(int doc, T value, LeafReaderContext ctx, Scorer scorer) throws IOException {
+      return new SortingGroupHead<>(sort, value, doc, ctx, scorer);
+    }
+  }
+
+  private static class SortingGroupHead<T> extends GroupHead<T> {
+
+    final FieldComparator[] comparators;
+    final LeafFieldComparator[] leafComparators;
+
+    protected SortingGroupHead(Sort sort, T groupValue, int doc, LeafReaderContext context, Scorer scorer) throws IOException {
+      super(groupValue, doc, context.docBase);
+      final SortField[] sortFields = sort.getSort();
+      comparators = new FieldComparator[sortFields.length];
+      leafComparators = new LeafFieldComparator[sortFields.length];
+      for (int i = 0; i < sortFields.length; i++) {
+        comparators[i] = sortFields[i].getComparator(1, i);
+        leafComparators[i] = comparators[i].getLeafComparator(context);
+        leafComparators[i].setScorer(scorer);
+        leafComparators[i].copy(0, doc);
+        leafComparators[i].setBottom(0);
+      }
+    }
+
+    @Override
+    public void setNextReader(LeafReaderContext ctx) throws IOException {
+      super.setNextReader(ctx);
+      for (int i = 0; i < comparators.length; i++) {
+        leafComparators[i] = comparators[i].getLeafComparator(ctx);
+      }
+    }
+
+    @Override
+    protected void setScorer(Scorer scorer) throws IOException {
+      for (LeafFieldComparator c : leafComparators) {
+        c.setScorer(scorer);
+      }
+    }
+
+    @Override
+    public int compare(int compIDX, int doc) throws IOException {
+      return leafComparators[compIDX].compareBottom(doc);
+    }
+
+    @Override
+    public void updateDocHead(int doc) throws IOException {
+      for (LeafFieldComparator comparator : leafComparators) {
+        comparator.copy(0, doc);
+        comparator.setBottom(0);
+      }
+      this.doc = doc + docBase;
+    }
+  }
+
+  /**
+   * Specialized implementation for sorting by score
+   */
+  private static class ScoringGroupHeadsCollector<T> extends AllGroupHeadsCollector<T> {
+
+    protected ScoringGroupHeadsCollector(GroupSelector<T> selector, Sort sort) {
+      super(selector, sort);
+    }
+
+    @Override
+    protected GroupHead<T> newGroupHead(int doc, T value, LeafReaderContext context, Scorer scorer) throws IOException {
+      return new ScoringGroupHead<>(scorer, value, doc, context.docBase);
+    }
+  }
+
+  private static class ScoringGroupHead<T> extends GroupHead<T> {
+
+    private Scorer scorer;
+    private float topScore;
+
+    protected ScoringGroupHead(Scorer scorer, T groupValue, int doc, int docBase) throws IOException {
+      super(groupValue, doc, docBase);
+      assert scorer.docID() == doc;
+      this.scorer = scorer;
+      this.topScore = scorer.score();
+    }
+
+    @Override
+    protected void setScorer(Scorer scorer) {
+      this.scorer = scorer;
+    }
+
+    @Override
+    protected int compare(int compIDX, int doc) throws IOException {
+      assert scorer.docID() == doc;
+      assert compIDX == 0;
+      float score = scorer.score();
+      int c = Float.compare(score, topScore);
+      if (c > 0)
+        topScore = score;
+      return c;
+    }
+
+    @Override
+    protected void updateDocHead(int doc) throws IOException {
+      this.doc = doc + docBase;
+    }
+  }
+
 }
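
A short usage sketch of the new factory method, assuming an index with a SortedDocValues field "author" plus an IndexSearcher and Query in scope; TermGroupSelector is introduced elsewhere in this commit, and retrieveGroupHeads() is the existing accessor for the collected head documents.

    GroupSelector<BytesRef> selector = new TermGroupSelector("author");
    AllGroupHeadsCollector<BytesRef> headsCollector =
        AllGroupHeadsCollector.newCollector(selector, Sort.RELEVANCE);
    searcher.search(query, headsCollector);
    int[] headDocs = headsCollector.retrieveGroupHeads(); // one doc id per group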

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/AllGroupsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AllGroupsCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AllGroupsCollector.java
index af697af..8434534 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AllGroupsCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AllGroupsCollector.java
@@ -18,23 +18,34 @@ package org.apache.lucene.search.grouping;
 
 import java.io.IOException;
 import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
 
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.SimpleCollector;
-import org.apache.lucene.util.BytesRef;
 
 /**
  * A collector that collects all groups that match the
  * query. Only the group value is collected, and the order
  * is undefined.  This collector does not determine
  * the most relevant document of a group.
- * <p>
- * This is an abstract version. Concrete implementations define
- * what a group actually is and how it is internally collected.
  *
  * @lucene.experimental
  */
-public abstract class AllGroupsCollector<T> extends SimpleCollector {
+public class AllGroupsCollector<T> extends SimpleCollector {
+
+  private final GroupSelector<T> groupSelector;
+
+  private final Set<T> groups = new HashSet<T>();
+
+  /**
+   * Create a new AllGroupsCollector
+   * @param groupSelector the GroupSelector to determine groups
+   */
+  public AllGroupsCollector(GroupSelector<T> groupSelector) {
+    this.groupSelector = groupSelector;
+  }
 
   /**
    * Returns the total number of groups for the executed search.
@@ -49,18 +60,31 @@ public abstract class AllGroupsCollector<T> extends SimpleCollector {
   /**
    * Returns the group values
    * <p>
-   * This is an unordered collections of group values. For each group that matched the query there is a {@link BytesRef}
-   * representing a group value.
+   * This is an unordered collection of group values.
    *
    * @return the group values
    */
-  public abstract Collection<T> getGroups();
+  public Collection<T> getGroups() {
+    return groups;
+  }
 
-  // Empty not necessary
   @Override
   public void setScorer(Scorer scorer) throws IOException {}
 
   @Override
+  protected void doSetNextReader(LeafReaderContext context) throws IOException {
+    groupSelector.setNextReader(context);
+  }
+
+  @Override
+  public void collect(int doc) throws IOException {
+    groupSelector.advanceTo(doc);
+    if (groups.contains(groupSelector.currentValue()))
+      return;
+    groups.add(groupSelector.copyValue());
+  }
+
+  @Override
   public boolean needsScores() {
     return false; // the result is unaffected by relevancy
   }
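
With the class now concrete, counting groups no longer needs a per-type subclass; a sketch assuming the same "author" field and searcher as above.

    AllGroupsCollector<BytesRef> allGroups =
        new AllGroupsCollector<>(new TermGroupSelector("author"));
    searcher.search(query, allGroups);
    int groupCount = allGroups.getGroupCount();          // number of distinct groups
    Collection<BytesRef> values = allGroups.getGroups(); // unordered group values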

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java
index c965042..a50fda1 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java
@@ -50,7 +50,7 @@ import org.apache.lucene.util.PriorityQueue;
  *  being that the documents in each group must always be
  *  indexed as a block.  This collector also fills in
  *  TopGroups.totalGroupCount without requiring the separate
- *  {@link org.apache.lucene.search.grouping.term.TermAllGroupsCollector}.  However, this collector does
+ *  {@link org.apache.lucene.search.grouping.AllGroupsCollector}.  However, this collector does
  *  not fill in the groupValue of each group; this field
  *  will always be null.
  *

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/DistinctValuesCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/DistinctValuesCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/DistinctValuesCollector.java
index 54d752c..103b0d2 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/DistinctValuesCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/DistinctValuesCollector.java
@@ -16,10 +16,14 @@
  */
 package org.apache.lucene.search.grouping;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.SimpleCollector;
 
 /**
@@ -27,33 +31,99 @@ import org.apache.lucene.search.SimpleCollector;
  *
  * @lucene.experimental
  */
-public abstract class DistinctValuesCollector<T> extends SimpleCollector {
+public class DistinctValuesCollector<T, R> extends SecondPassGroupingCollector<T> {
+
+  /**
+   * Create a DistinctValuesCollector
+   * @param groupSelector the group selector to determine the top-level groups
+   * @param groups        the top-level groups to collect for
+   * @param valueSelector a group selector to determine which values to collect per-group
+   */
+  public DistinctValuesCollector(GroupSelector<T> groupSelector, Collection<SearchGroup<T>> groups,
+                                       GroupSelector<R> valueSelector) {
+    super(groupSelector, groups, new DistinctValuesReducer<>(valueSelector));
+  }
+
+  private static class ValuesCollector<R> extends SimpleCollector {
+
+    final GroupSelector<R> valueSelector;
+    final Set<R> values = new HashSet<>();
+
+    private ValuesCollector(GroupSelector<R> valueSelector) {
+      this.valueSelector = valueSelector;
+    }
+
+    @Override
+    public void collect(int doc) throws IOException {
+      if (valueSelector.advanceTo(doc) == GroupSelector.State.ACCEPT) {
+        R value = valueSelector.currentValue();
+        if (values.contains(value) == false)
+          values.add(valueSelector.copyValue());
+      }
+      else {
+        if (values.contains(null) == false)
+          values.add(null);
+      }
+    }
+
+    @Override
+    protected void doSetNextReader(LeafReaderContext context) throws IOException {
+      valueSelector.setNextReader(context);
+    }
+
+    @Override
+    public boolean needsScores() {
+      return false;
+    }
+  }
+
+  private static class DistinctValuesReducer<T, R> extends GroupReducer<T, ValuesCollector<R>> {
+
+    final GroupSelector<R> valueSelector;
+
+    private DistinctValuesReducer(GroupSelector<R> valueSelector) {
+      this.valueSelector = valueSelector;
+    }
+
+    @Override
+    public boolean needsScores() {
+      return false;
+    }
+
+    @Override
+    protected ValuesCollector<R> newCollector() {
+      return new ValuesCollector<>(valueSelector);
+    }
+  }
 
   /**
    * Returns all unique values for each top N group.
    *
    * @return all unique values for each top N group
    */
-  public abstract List<GroupCount<T>> getGroups();
+  public List<GroupCount<T, R>> getGroups() {
+    List<GroupCount<T, R>> counts = new ArrayList<>();
+    for (SearchGroup<T> group : groups) {
+      @SuppressWarnings("unchecked")
+      ValuesCollector<R> vc = (ValuesCollector<R>) groupReducer.getCollector(group.groupValue);
+      counts.add(new GroupCount<>(group.groupValue, vc.values));
+    }
+    return counts;
+  }
 
   /**
    * Returned by {@link DistinctValuesCollector#getGroups()},
    * representing the value and set of distinct values for the group.
    */
-  public static class GroupCount<T> {
+  public static class GroupCount<T, R> {
 
     public final T groupValue;
-    public final Set<T> uniqueValues;
+    public final Set<R> uniqueValues;
 
-    public GroupCount(T groupValue) {
+    public GroupCount(T groupValue, Set<R> values) {
       this.groupValue = groupValue;
-      this.uniqueValues = new HashSet<>();
+      this.uniqueValues = values;
     }
   }
 
-  @Override
-  public boolean needsScores() {
-    return false; // not needed to fetch all values
-  }
-
 }
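
A hedged sketch of the reworked two-selector API: gather the top "author" groups in a first pass, then collect the distinct "category" values inside each (both field names are illustrative).

    FirstPassGroupingCollector<BytesRef> firstPass =
        new FirstPassGroupingCollector<>(new TermGroupSelector("author"), Sort.RELEVANCE, 10);
    searcher.search(query, firstPass);
    Collection<SearchGroup<BytesRef>> topGroups = firstPass.getTopGroups(0, false); // null if no groups matched

    DistinctValuesCollector<BytesRef, BytesRef> distinct =
        new DistinctValuesCollector<>(new TermGroupSelector("author"), topGroups,
            new TermGroupSelector("category"));
    searcher.search(query, distinct);
    for (DistinctValuesCollector.GroupCount<BytesRef, BytesRef> count : distinct.getGroups()) {
      // count.groupValue is the group; count.uniqueValues holds its distinct values
    }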

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/FirstPassGroupingCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/FirstPassGroupingCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/FirstPassGroupingCollector.java
index 02bb1a2..bd47adb 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/FirstPassGroupingCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/FirstPassGroupingCollector.java
@@ -33,15 +33,16 @@ import org.apache.lucene.search.SortField;
 
 /** FirstPassGroupingCollector is the first of two passes necessary
  *  to collect grouped hits.  This pass gathers the top N sorted
- *  groups. Concrete subclasses define what a group is and how it
- *  is internally collected.
+ *  groups. Groups are defined by a {@link GroupSelector}
  *
  *  <p>See {@link org.apache.lucene.search.grouping} for more
  *  details including a full code example.</p>
  *
  * @lucene.experimental
  */
-abstract public class FirstPassGroupingCollector<T> extends SimpleCollector {
+public class FirstPassGroupingCollector<T> extends SimpleCollector {
+
+  private final GroupSelector<T> groupSelector;
 
   private final FieldComparator<?>[] comparators;
   private final LeafFieldComparator[] leafComparators;
@@ -60,16 +61,18 @@ abstract public class FirstPassGroupingCollector<T> extends SimpleCollector {
   /**
    * Create the first pass collector.
    *
-   *  @param groupSort The {@link Sort} used to sort the
+   * @param groupSelector a GroupSelector used to define groups
+   * @param groupSort The {@link Sort} used to sort the
    *    groups.  The top sorted document within each group
    *    according to groupSort, determines how that group
    *    sorts against other groups.  This must be non-null,
    *    ie, if you want to groupSort by relevance use
    *    Sort.RELEVANCE.
-   *  @param topNGroups How many top groups to keep.
+   * @param topNGroups How many top groups to keep.
    */
   @SuppressWarnings({"unchecked", "rawtypes"})
-  public FirstPassGroupingCollector(Sort groupSort, int topNGroups) {
+  public FirstPassGroupingCollector(GroupSelector<T> groupSelector, Sort groupSort, int topNGroups) {
+    this.groupSelector = groupSelector;
     if (topNGroups < 1) {
       throw new IllegalArgumentException("topNGroups must be >= 1 (got " + topNGroups + ")");
     }
@@ -133,7 +136,7 @@ abstract public class FirstPassGroupingCollector<T> extends SimpleCollector {
       if (upto++ < groupOffset) {
         continue;
       }
-      //System.out.println("  group=" + (group.groupValue == null ? "null" : group.groupValue.utf8ToString()));
+      // System.out.println("  group=" + (group.groupValue == null ? "null" : group.groupValue.toString()));
       SearchGroup<T> searchGroup = new SearchGroup<>();
       searchGroup.groupValue = group.groupValue;
       if (fillFields) {
@@ -155,14 +158,11 @@ abstract public class FirstPassGroupingCollector<T> extends SimpleCollector {
     }
   }
 
-  @Override
-  public void collect(int doc) throws IOException {
-    //System.out.println("FP.collect doc=" + doc);
-
+  private boolean isCompetitive(int doc) throws IOException {
     // If orderedGroups != null we already have collected N groups and
     // can short circuit by comparing this document to the bottom group,
     // without having to find what group this document belongs to.
-    
+
     // Even if this document belongs to a group in the top N, we'll know that
     // we don't have to update that group.
 
@@ -173,7 +173,7 @@ abstract public class FirstPassGroupingCollector<T> extends SimpleCollector {
         final int c = reversed[compIDX] * leafComparators[compIDX].compareBottom(doc);
         if (c < 0) {
           // Definitely not competitive. So don't even bother to continue
-          return;
+          return false;
         } else if (c > 0) {
           // Definitely competitive.
           break;
@@ -181,15 +181,24 @@ abstract public class FirstPassGroupingCollector<T> extends SimpleCollector {
           // Here c=0. If we're at the last comparator, this doc is not
           // competitive, since docs are visited in doc Id order, which means
           // this doc cannot compete with any other document in the queue.
-          return;
+          return false;
         }
       }
     }
+    return true;
+  }
+
+  @Override
+  public void collect(int doc) throws IOException {
+
+    if (isCompetitive(doc) == false)
+      return;
 
     // TODO: should we add option to mean "ignore docs that
     // don't have the group field" (instead of stuffing them
     // under null group)?
-    final T groupValue = getDocGroupValue(doc);
+    groupSelector.advanceTo(doc);
+    T groupValue = groupSelector.currentValue();
 
     final CollectedSearchGroup<T> group = groupMap.get(groupValue);
 
@@ -207,7 +216,7 @@ abstract public class FirstPassGroupingCollector<T> extends SimpleCollector {
 
         // Add a new CollectedSearchGroup:
         CollectedSearchGroup<T> sg = new CollectedSearchGroup<>();
-        sg.groupValue = copyDocGroupValue(groupValue, null);
+        sg.groupValue = groupSelector.copyValue();
         sg.comparatorSlot = groupMap.size();
         sg.topDoc = docBase + doc;
         for (LeafFieldComparator fc : leafComparators) {
@@ -233,7 +242,7 @@ abstract public class FirstPassGroupingCollector<T> extends SimpleCollector {
       groupMap.remove(bottomGroup.groupValue);
 
       // reuse the removed CollectedSearchGroup
-      bottomGroup.groupValue = copyDocGroupValue(groupValue, bottomGroup.groupValue);
+      bottomGroup.groupValue = groupSelector.copyValue();
       bottomGroup.topDoc = docBase + doc;
 
       for (LeafFieldComparator fc : leafComparators) {
@@ -338,25 +347,15 @@ abstract public class FirstPassGroupingCollector<T> extends SimpleCollector {
     for (int i=0; i<comparators.length; i++) {
       leafComparators[i] = comparators[i].getLeafComparator(readerContext);
     }
+    groupSelector.setNextReader(readerContext);
   }
 
   /**
-   * Returns the group value for the specified doc.
-   *
-   * @param doc The specified doc
-   * @return the group value for the specified doc
-   */
-  protected abstract T getDocGroupValue(int doc) throws IOException;
-
-  /**
-   * Returns a copy of the specified group value by creating a new instance and copying the value from the specified
-   * groupValue in the new instance. Or optionally the reuse argument can be used to copy the group value in.
-   *
-   * @param groupValue The group value to copy
-   * @param reuse Optionally a reuse instance to prevent a new instance creation
-   * @return a copy of the specified group value
+   * @return the GroupSelector used for this Collector
    */
-  protected abstract T copyDocGroupValue(T groupValue, T reuse);
+  public GroupSelector<T> getGroupSelector() {
+    return groupSelector;
+  }
 
 }
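
Since groups are now defined by the injected GroupSelector, the same collector covers function-based grouping too; a sketch assuming the queries-module LongFieldSource as the ValueSource (any ValueSource should work the same way).

    ValueSource popularity = new LongFieldSource("popularity");
    FirstPassGroupingCollector<MutableValue> firstPass =
        new FirstPassGroupingCollector<>(new ValueSourceGroupSelector(popularity, new HashMap<>()),
            new Sort(new SortField("popularity", SortField.Type.LONG)), 5);
    searcher.search(query, firstPass);
    Collection<SearchGroup<MutableValue>> groups = firstPass.getTopGroups(0, true);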
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupReducer.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupReducer.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupReducer.java
new file mode 100644
index 0000000..4366e91
--- /dev/null
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupReducer.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search.grouping;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.LeafCollector;
+import org.apache.lucene.search.Scorer;
+
+/**
+ * Concrete implementations of this class define what to collect for individual
+ * groups during the second-pass of a grouping search.
+ *
+ * Each group is assigned a Collector returned by {@link #newCollector()}, and
+ * {@link LeafCollector#collect(int)} is called for each document that is in
+ * a group.
+ *
+ * @see SecondPassGroupingCollector
+ *
+ * @param <T> the type of the value used for grouping
+ * @param <C> the type of {@link Collector} used to reduce each group
+ */
+public abstract class GroupReducer<T, C extends Collector> {
+
+  private final Map<T, GroupCollector<C>> groups = new HashMap<>();
+
+  /**
+   * Define which groups should be reduced.
+   *
+   * Called by {@link SecondPassGroupingCollector}
+   */
+  public void setGroups(Collection<SearchGroup<T>> groups) {
+    for (SearchGroup<T> group : groups) {
+      this.groups.put(group.groupValue, new GroupCollector<>(newCollector()));
+    }
+  }
+
+  /**
+   * Whether or not this reducer requires collected documents to be scored
+   */
+  public abstract boolean needsScores();
+
+  /**
+   * Creates a new Collector for each group
+   */
+  protected abstract C newCollector();
+
+  /**
+   * Get the Collector for a given group
+   */
+  public final C getCollector(T value) {
+    return groups.get(value).collector;
+  }
+
+  /**
+   * Collect a given document into a given group
+   * @throws IOException on error
+   */
+  public final void collect(T value, int doc) throws IOException {
+    GroupCollector<C> collector = groups.get(value);
+    collector.leafCollector.collect(doc);
+  }
+
+  /**
+   * Set the Scorer on all group collectors
+   */
+  public final void setScorer(Scorer scorer) throws IOException {
+    for (GroupCollector<C> collector : groups.values()) {
+      collector.leafCollector.setScorer(scorer);
+    }
+  }
+
+  /**
+   * Called when the parent {@link SecondPassGroupingCollector} moves to a new segment
+   */
+  public final void setNextReader(LeafReaderContext ctx) throws IOException {
+    for (GroupCollector<C> collector : groups.values()) {
+      collector.leafCollector = collector.collector.getLeafCollector(ctx);
+    }
+  }
+
+  private static final class GroupCollector<C extends Collector> {
+
+    final C collector;
+    LeafCollector leafCollector;
+
+    private GroupCollector(C collector) {
+      this.collector = collector;
+    }
+  }
+
+}
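
To make the contract concrete, a hypothetical reducer that only counts hits per group, backed by the stock TotalHitCountCollector; the class name and helper method are illustrative.

    import org.apache.lucene.search.TotalHitCountCollector;

    class HitCountReducer<T> extends GroupReducer<T, TotalHitCountCollector> {
      @Override
      public boolean needsScores() {
        return false; // counting hits never requires scores
      }

      @Override
      protected TotalHitCountCollector newCollector() {
        return new TotalHitCountCollector(); // one collector per group
      }

      public int hitCount(T group) {
        return getCollector(group).getTotalHits();
      }
    }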

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupSelector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupSelector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupSelector.java
new file mode 100644
index 0000000..dbb0932
--- /dev/null
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupSelector.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search.grouping;
+
+import java.io.IOException;
+import java.util.Collection;
+
+import org.apache.lucene.index.LeafReaderContext;
+
+/**
+ * Defines a group, for use by grouping collectors
+ *
+ * A GroupSelector acts as an iterator over documents.  For each segment, clients
+ * should call {@link #setNextReader(LeafReaderContext)}, and then {@link #advanceTo(int)}
+ * for each matching document.
+ *
+ * @param <T> the type of the group value
+ */
+public abstract class GroupSelector<T> {
+
+  /**
+   * What to do with the current value
+   */
+  public enum State { SKIP, ACCEPT }
+
+  /**
+   * Set the LeafReaderContext
+   */
+  public abstract void setNextReader(LeafReaderContext readerContext) throws IOException;
+
+  /**
+   * Advance the GroupSelector's iterator to the given document
+   */
+  public abstract State advanceTo(int doc) throws IOException;
+
+  /**
+   * Get the group value of the current document
+   *
+   * N.B. this object may be reused; for a persistent version use {@link #copyValue()}
+   */
+  public abstract T currentValue();
+
+  /**
+   * @return a copy of the group value of the current document
+   */
+  public abstract T copyValue();
+
+  /**
+   * Set a restriction on the group values returned by this selector
+   *
+   * If the selector is positioned on a document whose group value is not contained
+   * within this set, then {@link #advanceTo(int)} will return {@link State#SKIP}
+   *
+   * @param groups a set of {@link SearchGroup} objects to limit selections to
+   */
+  public abstract void setGroups(Collection<SearchGroup<T>> groups);
+
+}
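
As an illustration of the contract, a sketch of a selector grouping by a numeric doc-values field; it follows the docID()/advance() iteration pattern used elsewhere in this commit, and the class and field names are illustrative.

    import java.io.IOException;
    import java.util.Collection;
    import java.util.HashSet;
    import java.util.Set;

    import org.apache.lucene.index.DocValues;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.index.NumericDocValues;

    class LongGroupSelector extends GroupSelector<Long> {

      private final String field;
      private NumericDocValues values;
      private long current;
      private Set<Long> limitTo; // null means accept every group

      LongGroupSelector(String field) {
        this.field = field;
      }

      @Override
      public void setNextReader(LeafReaderContext readerContext) throws IOException {
        values = DocValues.getNumeric(readerContext.reader(), field);
      }

      @Override
      public State advanceTo(int doc) throws IOException {
        if (values.docID() < doc) {
          values.advance(doc);
        }
        current = values.docID() == doc ? values.longValue() : 0L; // 0 for docs with no value
        return limitTo == null || limitTo.contains(current) ? State.ACCEPT : State.SKIP;
      }

      @Override
      public Long currentValue() {
        return current;
      }

      @Override
      public Long copyValue() {
        return current; // boxed longs are immutable, no deep copy needed
      }

      @Override
      public void setGroups(Collection<SearchGroup<Long>> searchGroups) {
        limitTo = new HashSet<>();
        for (SearchGroup<Long> group : searchGroups) {
          limitTo.add(group.groupValue);
        }
      }
    }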

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/Grouper.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/Grouper.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/Grouper.java
deleted file mode 100644
index 2ff79a1..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/Grouper.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.lucene.search.grouping;
-
-import java.io.IOException;
-import java.util.Collection;
-
-import org.apache.lucene.search.Sort;
-
-/**
- * A factory object to create first and second-pass collectors, run by a {@link GroupingSearch}
- * @param <T> the type the group value
- */
-public abstract class Grouper<T> {
-
-  /**
-   * Create a first-pass collector
-   * @param sort  the order in which groups should be returned
-   * @param count how many groups to return
-   */
-  public abstract FirstPassGroupingCollector<T> getFirstPassCollector(Sort sort, int count) throws IOException;
-
-  /**
-   * Create an {@link AllGroupsCollector}
-   */
-  public abstract AllGroupsCollector<T> getAllGroupsCollector();
-
-  /**
-   * Create an {@link AllGroupHeadsCollector}
-   * @param sort a within-group sort order to determine which doc is the group head
-   */
-  public abstract AllGroupHeadsCollector<T> getGroupHeadsCollector(Sort sort);
-
-  /**
-   * Create a second-pass collector
-   */
-  public abstract SecondPassGroupingCollector<T> getSecondPassCollector(
-      Collection<SearchGroup<T>> groups, Sort groupSort, Sort withinGroupSort,
-      int maxDocsPerGroup, boolean getScores, boolean getMaxScores, boolean fillSortFields) throws IOException;
-
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupingSearch.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupingSearch.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupingSearch.java
index f4319d5..a36917d 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupingSearch.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupingSearch.java
@@ -30,8 +30,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.Weight;
-import org.apache.lucene.search.grouping.function.FunctionGrouper;
-import org.apache.lucene.search.grouping.term.TermGrouper;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.mutable.MutableValue;
@@ -43,7 +41,7 @@ import org.apache.lucene.util.mutable.MutableValue;
  */
 public class GroupingSearch {
 
-  private final Grouper grouper;
+  private final GroupSelector grouper;
   private final Query groupEndDocs;
 
   private Sort groupSort = Sort.RELEVANCE;
@@ -71,11 +69,7 @@ public class GroupingSearch {
    * @param groupField The name of the field to group by.
    */
   public GroupingSearch(String groupField) {
-    this(new TermGrouper(groupField, 128), null);
-  }
-
-  public GroupingSearch(String groupField, int initialSize) {
-    this(new TermGrouper(groupField, initialSize), null);
+    this(new TermGroupSelector(groupField), null);
   }
 
   /**
@@ -86,7 +80,7 @@ public class GroupingSearch {
    * @param valueSourceContext The context of the specified groupFunction
    */
   public GroupingSearch(ValueSource groupFunction, Map<?, ?> valueSourceContext) {
-    this(new FunctionGrouper(groupFunction, valueSourceContext), null);
+    this(new ValueSourceGroupSelector(groupFunction, valueSourceContext), null);
   }
 
   /**
@@ -99,7 +93,7 @@ public class GroupingSearch {
     this(null, groupEndDocs);
   }
 
-  private GroupingSearch(Grouper grouper, Query groupEndDocs) {
+  private GroupingSearch(GroupSelector grouper, Query groupEndDocs) {
     this.grouper = grouper;
     this.groupEndDocs = groupEndDocs;
   }
@@ -129,10 +123,10 @@ public class GroupingSearch {
   protected TopGroups groupByFieldOrFunction(IndexSearcher searcher, Query query, int groupOffset, int groupLimit) throws IOException {
     int topN = groupOffset + groupLimit;
 
-    final FirstPassGroupingCollector firstPassCollector = grouper.getFirstPassCollector(groupSort, topN);
-    final AllGroupsCollector allGroupsCollector = allGroups ? grouper.getAllGroupsCollector() : null;
+    final FirstPassGroupingCollector firstPassCollector = new FirstPassGroupingCollector(grouper, groupSort, topN);
+    final AllGroupsCollector allGroupsCollector = allGroups ? new AllGroupsCollector(grouper) : null;
     final AllGroupHeadsCollector allGroupHeadsCollector
-        = allGroupHeads ? grouper.getGroupHeadsCollector(sortWithinGroup) : null;
+        = allGroupHeads ? AllGroupHeadsCollector.newCollector(grouper, sortWithinGroup) : null;
 
     final Collector firstRound = MultiCollector.wrap(firstPassCollector, allGroupsCollector, allGroupHeadsCollector);
 
@@ -158,8 +152,8 @@ public class GroupingSearch {
     }
 
     int topNInsideGroup = groupDocsOffset + groupDocsLimit;
-    SecondPassGroupingCollector secondPassCollector
-        = grouper.getSecondPassCollector(topSearchGroups, groupSort, sortWithinGroup, topNInsideGroup,
+    TopGroupsCollector secondPassCollector
+        = new TopGroupsCollector(grouper, topSearchGroups, groupSort, sortWithinGroup, topNInsideGroup,
                                          includeScores, includeMaxScore, fillSortFields);
 
     if (cachedCollector != null && cachedCollector.isCached()) {
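
End to end, the slimmed-down class now threads one TermGroupSelector through every pass; a usage sketch with an illustrative field and query.

    GroupingSearch groupingSearch = new GroupingSearch("author");
    groupingSearch.setGroupSort(Sort.RELEVANCE);
    groupingSearch.setGroupDocsLimit(3); // top 3 docs per group
    TopGroups<BytesRef> result =
        groupingSearch.search(searcher, new TermQuery(new Term("title", "lucene")), 0, 10);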

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/SecondPassGroupingCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/SecondPassGroupingCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/SecondPassGroupingCollector.java
index f8feb75..c54c8ee 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/SecondPassGroupingCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/SecondPassGroupingCollector.java
@@ -18,152 +18,82 @@ package org.apache.lucene.search.grouping;
 
 import java.io.IOException;
 import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
 import java.util.Objects;
 
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.LeafCollector;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.SimpleCollector;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.TopDocsCollector;
-import org.apache.lucene.search.TopFieldCollector;
-import org.apache.lucene.search.TopScoreDocCollector;
 
 /**
- * SecondPassGroupingCollector is the second of two passes
- * necessary to collect grouped docs.  This pass gathers the
- * top N documents per top group computed from the
- * first pass. Concrete subclasses define what a group is and how it
- * is internally collected.
+ * SecondPassGroupingCollector runs over an already collected set of
+ * groups, further applying a {@link GroupReducer} to each group
  *
- * <p>See {@link org.apache.lucene.search.grouping} for more
- * details including a full code example.</p>
+ * @see TopGroupsCollector
+ * @see DistinctValuesCollector
  *
  * @lucene.experimental
  */
-public abstract class SecondPassGroupingCollector<T> extends SimpleCollector {
+public class SecondPassGroupingCollector<T> extends SimpleCollector {
 
-  private final Collection<SearchGroup<T>> groups;
-  private final Sort groupSort;
-  private final Sort withinGroupSort;
-  private final int maxDocsPerGroup;
-  private final boolean needsScores;
-  protected final Map<T, SearchGroupDocs<T>> groupMap;
+  protected final GroupSelector<T> groupSelector;
+  protected final Collection<SearchGroup<T>> groups;
+  protected final GroupReducer<T, ?> groupReducer;
 
-  protected SearchGroupDocs<T>[] groupDocs;
+  protected int totalHitCount;
+  protected int totalGroupedHitCount;
 
-  private int totalHitCount;
-  private int totalGroupedHitCount;
-
-  public SecondPassGroupingCollector(Collection<SearchGroup<T>> groups, Sort groupSort, Sort withinGroupSort,
-                                     int maxDocsPerGroup, boolean getScores, boolean getMaxScores, boolean fillSortFields)
-    throws IOException {
+  /**
+   * Create a new SecondPassGroupingCollector
+   * @param groupSelector   the GroupSelector that defines groups for this search
+   * @param groups          the groups to collect documents for
+   * @param reducer         the reducer to apply to each group
+   */
+  public SecondPassGroupingCollector(GroupSelector<T> groupSelector, Collection<SearchGroup<T>> groups, GroupReducer<T, ?> reducer) {
 
     //System.out.println("SP init");
     if (groups.isEmpty()) {
       throw new IllegalArgumentException("no groups to collect (groups is empty)");
     }
 
+    this.groupSelector = Objects.requireNonNull(groupSelector);
+    this.groupSelector.setGroups(groups);
+
     this.groups = Objects.requireNonNull(groups);
-    this.groupSort = Objects.requireNonNull(groupSort);
-    this.withinGroupSort = Objects.requireNonNull(withinGroupSort);
-    this.maxDocsPerGroup = maxDocsPerGroup;
-    this.needsScores = getScores || getMaxScores || withinGroupSort.needsScores();
+    this.groupReducer = reducer;
+    reducer.setGroups(groups);
+  }
 
-    this.groupMap = new HashMap<>(groups.size());
-    for (SearchGroup<T> group : groups) {
-      //System.out.println("  prep group=" + (group.groupValue == null ? "null" : group.groupValue.utf8ToString()));
-      final TopDocsCollector<?> collector;
-      if (withinGroupSort.equals(Sort.RELEVANCE)) { // optimize to use TopScoreDocCollector
-        // Sort by score
-        collector = TopScoreDocCollector.create(maxDocsPerGroup);
-      } else {
-        // Sort by fields
-        collector = TopFieldCollector.create(withinGroupSort, maxDocsPerGroup, fillSortFields, getScores, getMaxScores);
-      }
-      groupMap.put(group.groupValue, new SearchGroupDocs<>(group.groupValue, collector));
-    }
+  /**
+   * @return the GroupSelector used in this collector
+   */
+  public GroupSelector<T> getGroupSelector() {
+    return groupSelector;
   }
 
   @Override
   public boolean needsScores() {
-    return needsScores;
+    return groupReducer.needsScores();
   }
 
   @Override
   public void setScorer(Scorer scorer) throws IOException {
-    for (SearchGroupDocs<T> group : groupMap.values()) {
-      group.leafCollector.setScorer(scorer);
-    }
+    groupReducer.setScorer(scorer);
   }
 
   @Override
   public void collect(int doc) throws IOException {
     totalHitCount++;
-    SearchGroupDocs<T> group = retrieveGroup(doc);
-    if (group != null) {
-      totalGroupedHitCount++;
-      group.leafCollector.collect(doc);
-    }
+    if (groupSelector.advanceTo(doc) == GroupSelector.State.SKIP)
+      return;
+    totalGroupedHitCount++;
+    T value = groupSelector.currentValue();
+    groupReducer.collect(value, doc);
   }
 
-  /**
-   * Returns the group the specified doc belongs to or <code>null</code> if no group could be retrieved.
-   *
-   * @param doc The specified doc
-   * @return the group the specified doc belongs to or <code>null</code> if no group could be retrieved
-   * @throws IOException If an I/O related error occurred
-   */
-  protected abstract SearchGroupDocs<T> retrieveGroup(int doc) throws IOException;
-
   @Override
   protected void doSetNextReader(LeafReaderContext readerContext) throws IOException {
-    //System.out.println("SP.setNextReader");
-    for (SearchGroupDocs<T> group : groupMap.values()) {
-      group.leafCollector = group.collector.getLeafCollector(readerContext);
-    }
-  }
-
-  public TopGroups<T> getTopGroups(int withinGroupOffset) {
-    @SuppressWarnings({"unchecked","rawtypes"})
-    final GroupDocs<T>[] groupDocsResult = (GroupDocs<T>[]) new GroupDocs[groups.size()];
-
-    int groupIDX = 0;
-    float maxScore = Float.MIN_VALUE;
-    for(SearchGroup<?> group : groups) {
-      final SearchGroupDocs<T> groupDocs = groupMap.get(group.groupValue);
-      final TopDocs topDocs = groupDocs.collector.topDocs(withinGroupOffset, maxDocsPerGroup);
-      groupDocsResult[groupIDX++] = new GroupDocs<>(Float.NaN,
-                                                                    topDocs.getMaxScore(),
-                                                                    topDocs.totalHits,
-                                                                    topDocs.scoreDocs,
-                                                                    groupDocs.groupValue,
-                                                                    group.sortValues);
-      maxScore = Math.max(maxScore, topDocs.getMaxScore());
-    }
-
-    return new TopGroups<>(groupSort.getSort(),
-                                           withinGroupSort.getSort(),
-                                           totalHitCount, totalGroupedHitCount, groupDocsResult,
-                                           maxScore);
+    groupReducer.setNextReader(readerContext);
+    groupSelector.setNextReader(readerContext);
   }
 
-
-  // TODO: merge with SearchGroup or not?
-  // ad: don't need to build a new hashmap
-  // disad: blows up the size of SearchGroup if we need many of them, and couples implementations
-  public class SearchGroupDocs<T> {
-
-    public final T groupValue;
-    public final TopDocsCollector<?> collector;
-    public LeafCollector leafCollector;
-
-    public SearchGroupDocs(T groupValue, TopDocsCollector<?> collector) {
-      this.groupValue = groupValue;
-      this.collector = collector;
-    }
-  }
 }
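
The top-docs logic that used to live here now sits in TopGroupsCollector, itself a SecondPassGroupingCollector wired to a top-docs GroupReducer; a sketch assuming topGroups came from a FirstPassGroupingCollector as above, and that getTopGroups(int) keeps its previous signature.

    TopGroupsCollector<BytesRef> secondPass =
        new TopGroupsCollector<>(new TermGroupSelector("author"), topGroups,
            Sort.RELEVANCE, Sort.RELEVANCE, 10, true, true, false);
    searcher.search(query, secondPass);
    TopGroups<BytesRef> hits = secondPass.getTopGroups(0);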

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupFacetCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupFacetCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupFacetCollector.java
new file mode 100644
index 0000000..39d28a5
--- /dev/null
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupFacetCollector.java
@@ -0,0 +1,414 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.search.grouping;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefBuilder;
+import org.apache.lucene.util.SentinelIntSet;
+import org.apache.lucene.util.UnicodeUtil;
+
+/**
+ * An implementation of {@link GroupFacetCollector} that computes grouped facets based on the indexed terms
+ * from DocValues.
+ *
+ * @lucene.experimental
+ */
+public abstract class TermGroupFacetCollector extends GroupFacetCollector {
+
+  final List<GroupedFacetHit> groupedFacetHits;
+  final SentinelIntSet segmentGroupedFacetHits;
+
+  SortedDocValues groupFieldTermsIndex;
+
+  /**
+   * Factory method for creating the right implementation based on whether the facet field contains
+   * multiple tokens per documents.
+   *
+   * @param groupField The group field
+   * @param facetField The facet field
+   * @param facetFieldMultivalued Whether the facet field has multiple tokens per document
+   * @param facetPrefix The facet prefix a facet entry should start with to be included.
+   * @param initialSize The initial allocation size of the internal int set and group facet list which should roughly
+   *                    match the total number of expected unique groups. Be aware that the heap usage is
+   *                    4 bytes * initialSize.
+   * @return <code>TermGroupFacetCollector</code> implementation
+   */
+  public static TermGroupFacetCollector createTermGroupFacetCollector(String groupField,
+                                                                      String facetField,
+                                                                      boolean facetFieldMultivalued,
+                                                                      BytesRef facetPrefix,
+                                                                      int initialSize) {
+    if (facetFieldMultivalued) {
+      return new MV(groupField, facetField, facetPrefix, initialSize);
+    } else {
+      return new SV(groupField, facetField, facetPrefix, initialSize);
+    }
+  }
+
+  TermGroupFacetCollector(String groupField, String facetField, BytesRef facetPrefix, int initialSize) {
+    super(groupField, facetField, facetPrefix);
+    groupedFacetHits = new ArrayList<>(initialSize);
+    segmentGroupedFacetHits = new SentinelIntSet(initialSize, Integer.MIN_VALUE);
+  }
+
+  // Implementation for single valued facet fields.
+  static class SV extends TermGroupFacetCollector {
+
+    private SortedDocValues facetFieldTermsIndex;
+
+    SV(String groupField, String facetField, BytesRef facetPrefix, int initialSize) {
+      super(groupField, facetField, facetPrefix, initialSize);
+    }
+
+    @Override
+    public void collect(int doc) throws IOException {
+      if (doc > facetFieldTermsIndex.docID()) {
+        facetFieldTermsIndex.advance(doc);
+      }
+
+      int facetOrd;
+      if (doc == facetFieldTermsIndex.docID()) {
+        facetOrd = facetFieldTermsIndex.ordValue();
+      } else {
+        facetOrd = -1;
+      }
+      
+      if (facetOrd < startFacetOrd || facetOrd >= endFacetOrd) {
+        return;
+      }
+
+      if (doc > groupFieldTermsIndex.docID()) {
+        groupFieldTermsIndex.advance(doc);
+      }
+
+      int groupOrd;
+      if (doc == groupFieldTermsIndex.docID()) {
+        groupOrd = groupFieldTermsIndex.ordValue();
+      } else {
+        groupOrd = -1;
+      }
+      int segmentGroupedFacetsIndex = groupOrd * (facetFieldTermsIndex.getValueCount()+1) + facetOrd;
+      if (segmentGroupedFacetHits.exists(segmentGroupedFacetsIndex)) {
+        return;
+      }
+
+      segmentTotalCount++;
+      segmentFacetCounts[facetOrd+1]++;
+
+      segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
+
+      BytesRef groupKey;
+      if (groupOrd == -1) {
+        groupKey = null;
+      } else {
+        groupKey = BytesRef.deepCopyOf(groupFieldTermsIndex.lookupOrd(groupOrd));
+      }
+
+      BytesRef facetKey;
+      if (facetOrd == -1) {
+        facetKey = null;
+      } else {
+        facetKey = BytesRef.deepCopyOf(facetFieldTermsIndex.lookupOrd(facetOrd));
+      }
+
+      groupedFacetHits.add(new GroupedFacetHit(groupKey, facetKey));
+    }
+
+    @Override
+    protected void doSetNextReader(LeafReaderContext context) throws IOException {
+      if (segmentFacetCounts != null) {
+        segmentResults.add(createSegmentResult());
+      }
+
+      groupFieldTermsIndex = DocValues.getSorted(context.reader(), groupField);
+      facetFieldTermsIndex = DocValues.getSorted(context.reader(), facetField);
+
+      // 1+ to allow for the -1 "not set":
+      segmentFacetCounts = new int[facetFieldTermsIndex.getValueCount()+1];
+      segmentTotalCount = 0;
+
+      segmentGroupedFacetHits.clear();
+      for (GroupedFacetHit groupedFacetHit : groupedFacetHits) {
+        int facetOrd = groupedFacetHit.facetValue == null ? -1 : facetFieldTermsIndex.lookupTerm(groupedFacetHit.facetValue);
+        if (groupedFacetHit.facetValue != null && facetOrd < 0) {
+          continue;
+        }
+
+        int groupOrd = groupedFacetHit.groupValue == null ? -1 : groupFieldTermsIndex.lookupTerm(groupedFacetHit.groupValue);
+        if (groupedFacetHit.groupValue != null && groupOrd < 0) {
+          continue;
+        }
+
+        int segmentGroupedFacetsIndex = groupOrd * (facetFieldTermsIndex.getValueCount()+1) + facetOrd;
+        segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
+      }
+
+      if (facetPrefix != null) {
+        startFacetOrd = facetFieldTermsIndex.lookupTerm(facetPrefix);
+        if (startFacetOrd < 0) {
+          // Points to the ord one higher than facetPrefix
+          startFacetOrd = -startFacetOrd - 1;
+        }
+        BytesRefBuilder facetEndPrefix = new BytesRefBuilder();
+        facetEndPrefix.append(facetPrefix);
+        facetEndPrefix.append(UnicodeUtil.BIG_TERM);
+        endFacetOrd = facetFieldTermsIndex.lookupTerm(facetEndPrefix.get());
+        assert endFacetOrd < 0;
+        endFacetOrd = -endFacetOrd - 1; // Points to the ord one higher than facetEndPrefix
+      } else {
+        startFacetOrd = -1;
+        endFacetOrd = facetFieldTermsIndex.getValueCount();
+      }
+    }
+
+    @Override
+    protected SegmentResult createSegmentResult() throws IOException {
+      return new SegmentResult(segmentFacetCounts, segmentTotalCount, facetFieldTermsIndex.termsEnum(), startFacetOrd, endFacetOrd);
+    }
+
+    private static class SegmentResult extends GroupFacetCollector.SegmentResult {
+
+      final TermsEnum tenum;
+
+      SegmentResult(int[] counts, int total, TermsEnum tenum, int startFacetOrd, int endFacetOrd) throws IOException {
+        super(counts, total - counts[0], counts[0], endFacetOrd+1);
+        this.tenum = tenum;
+        this.mergePos = startFacetOrd == -1 ? 1 : startFacetOrd+1;
+        if (mergePos < maxTermPos) {
+          assert tenum != null;
+          tenum.seekExact(startFacetOrd == -1 ? 0 : startFacetOrd);
+          mergeTerm = tenum.term();
+        }
+      }
+
+      @Override
+      protected void nextTerm() throws IOException {
+        mergeTerm = tenum.next();
+      }
+    }
+  }
+
+  // Implementation for multi-valued facet fields.
+  static class MV extends TermGroupFacetCollector {
+
+    private SortedSetDocValues facetFieldDocTermOrds;
+    private TermsEnum facetOrdTermsEnum;
+    private int facetFieldNumTerms;
+
+    MV(String groupField, String facetField, BytesRef facetPrefix, int initialSize) {
+      super(groupField, facetField, facetPrefix, initialSize);
+    }
+
+    @Override
+    public void collect(int doc) throws IOException {
+      if (doc > groupFieldTermsIndex.docID()) {
+        groupFieldTermsIndex.advance(doc);
+      }
+
+      int groupOrd;
+      if (doc == groupFieldTermsIndex.docID()) {
+        groupOrd = groupFieldTermsIndex.ordValue();
+      } else {
+        groupOrd = -1;
+      }
+      
+      if (facetFieldNumTerms == 0) {
+        int segmentGroupedFacetsIndex = groupOrd * (facetFieldNumTerms + 1);
+        if (facetPrefix != null || segmentGroupedFacetHits.exists(segmentGroupedFacetsIndex)) {
+          return;
+        }
+
+        segmentTotalCount++;
+        segmentFacetCounts[facetFieldNumTerms]++;
+
+        segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
+        BytesRef groupKey;
+        if (groupOrd == -1) {
+          groupKey = null;
+        } else {
+          groupKey = BytesRef.deepCopyOf(groupFieldTermsIndex.lookupOrd(groupOrd));
+        }
+        groupedFacetHits.add(new GroupedFacetHit(groupKey, null));
+        return;
+      }
+
+      if (doc > facetFieldDocTermOrds.docID()) {
+        facetFieldDocTermOrds.advance(doc);
+      }
+      boolean empty = true;
+      if (doc == facetFieldDocTermOrds.docID()) {
+        long ord;
+        while ((ord = facetFieldDocTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
+          process(groupOrd, (int) ord);
+          empty = false;
+        }
+      }
+      
+      if (empty) {
+        process(groupOrd, facetFieldNumTerms); // this facet ord is reserved for docs not containing facet field.
+      }
+    }
+    
+    private void process(int groupOrd, int facetOrd) throws IOException {
+      if (facetOrd < startFacetOrd || facetOrd >= endFacetOrd) {
+        return;
+      }
+
+      int segmentGroupedFacetsIndex = groupOrd * (facetFieldNumTerms + 1) + facetOrd;
+      if (segmentGroupedFacetHits.exists(segmentGroupedFacetsIndex)) {
+        return;
+      }
+
+      segmentTotalCount++;
+      segmentFacetCounts[facetOrd]++;
+
+      segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
+
+      BytesRef groupKey;
+      if (groupOrd == -1) {
+        groupKey = null;
+      } else {
+        groupKey = BytesRef.deepCopyOf(groupFieldTermsIndex.lookupOrd(groupOrd));
+      }
+
+      final BytesRef facetValue;
+      if (facetOrd == facetFieldNumTerms) {
+        facetValue = null;
+      } else {
+        facetValue = BytesRef.deepCopyOf(facetFieldDocTermOrds.lookupOrd(facetOrd));
+      }
+      groupedFacetHits.add(new GroupedFacetHit(groupKey, facetValue));
+    }
+
+    @Override
+    protected void doSetNextReader(LeafReaderContext context) throws IOException {
+      if (segmentFacetCounts != null) {
+        segmentResults.add(createSegmentResult());
+      }
+
+      groupFieldTermsIndex = DocValues.getSorted(context.reader(), groupField);
+      facetFieldDocTermOrds = DocValues.getSortedSet(context.reader(), facetField);
+      facetFieldNumTerms = (int) facetFieldDocTermOrds.getValueCount();
+      if (facetFieldNumTerms == 0) {
+        facetOrdTermsEnum = null;
+      } else {
+        facetOrdTermsEnum = facetFieldDocTermOrds.termsEnum();
+      }
+      // [facetFieldNumTerms + 1]: one slot per facet value plus one for docs not containing the facet field
+      segmentFacetCounts = new int[facetFieldNumTerms + 1];
+      segmentTotalCount = 0;
+
+      segmentGroupedFacetHits.clear();
+      for (GroupedFacetHit groupedFacetHit : groupedFacetHits) {
+        int groupOrd = groupedFacetHit.groupValue == null ? -1 : groupFieldTermsIndex.lookupTerm(groupedFacetHit.groupValue);
+        if (groupedFacetHit.groupValue != null && groupOrd < 0) {
+          continue;
+        }
+
+        int facetOrd;
+        if (groupedFacetHit.facetValue != null) {
+          if (facetOrdTermsEnum == null || !facetOrdTermsEnum.seekExact(groupedFacetHit.facetValue)) {
+            continue;
+          }
+          facetOrd = (int) facetOrdTermsEnum.ord();
+        } else {
+          facetOrd = facetFieldNumTerms;
+        }
+
+        // (facetFieldNumTerms + 1): one slot per facet value plus one for docs not containing the facet field
+        int segmentGroupedFacetsIndex = groupOrd * (facetFieldNumTerms + 1) + facetOrd;
+        segmentGroupedFacetHits.put(segmentGroupedFacetsIndex);
+      }
+
+      if (facetPrefix != null) {
+        TermsEnum.SeekStatus seekStatus;
+        if (facetOrdTermsEnum != null) {
+          seekStatus = facetOrdTermsEnum.seekCeil(facetPrefix);
+        } else {
+          seekStatus = TermsEnum.SeekStatus.END;
+        }
+
+        if (seekStatus != TermsEnum.SeekStatus.END) {
+          startFacetOrd = (int) facetOrdTermsEnum.ord();
+        } else {
+          startFacetOrd = 0;
+          endFacetOrd = 0;
+          return;
+        }
+
+        BytesRefBuilder facetEndPrefix = new BytesRefBuilder();
+        facetEndPrefix.append(facetPrefix);
+        facetEndPrefix.append(UnicodeUtil.BIG_TERM);
+        seekStatus = facetOrdTermsEnum.seekCeil(facetEndPrefix.get());
+        if (seekStatus != TermsEnum.SeekStatus.END) {
+          endFacetOrd = (int) facetOrdTermsEnum.ord();
+        } else {
+          endFacetOrd = facetFieldNumTerms; // Don't include null...
+        }
+      } else {
+        startFacetOrd = 0;
+        endFacetOrd = facetFieldNumTerms + 1;
+      }
+    }
+
+    @Override
+    protected SegmentResult createSegmentResult() throws IOException {
+      return new SegmentResult(segmentFacetCounts, segmentTotalCount, facetFieldNumTerms, facetOrdTermsEnum, startFacetOrd, endFacetOrd);
+    }
+
+    private static class SegmentResult extends GroupFacetCollector.SegmentResult {
+
+      final TermsEnum tenum;
+
+      SegmentResult(int[] counts, int total, int missingCountIndex, TermsEnum tenum, int startFacetOrd, int endFacetOrd) throws IOException {
+        super(counts, total - counts[missingCountIndex], counts[missingCountIndex],
+            endFacetOrd == missingCountIndex + 1 ?  missingCountIndex : endFacetOrd);
+        this.tenum = tenum;
+        this.mergePos = startFacetOrd;
+        if (tenum != null) {
+          tenum.seekExact(mergePos);
+          mergeTerm = tenum.term();
+        }
+      }
+
+      @Override
+      protected void nextTerm() throws IOException {
+        mergeTerm = tenum.next();
+      }
+    }
+  }
+
+  private static class GroupedFacetHit {
+
+    final BytesRef groupValue;
+    final BytesRef facetValue;
+
+    GroupedFacetHit(BytesRef groupValue, BytesRef facetValue) {
+      this.groupValue = groupValue;
+      this.facetValue = facetValue;
+    }
+  }
+}
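
A minimal usage sketch for the collector above (illustrative, not part of the
patch): it assumes an IndexSearcher `searcher` and a Query `query`, with
"author" indexed as SortedDocValues and "tag" as SortedSetDocValues. The
result-merging API is inherited from the existing GroupFacetCollector base
class.

  // Count "tag" values once per distinct "author" group.
  TermGroupFacetCollector collector = TermGroupFacetCollector.createTermGroupFacetCollector(
      "author", "tag", true /* facet field is multi-valued */, null /* no facet prefix */, 128);
  searcher.search(query, collector);
  // Merge per-segment counts: at most 10 facet values, minimum count 0, ordered by count.
  GroupFacetCollector.GroupedFacetResult result = collector.mergeSegmentResults(10, 0, true);
  for (GroupFacetCollector.FacetEntry entry : result.getFacetEntries(0, 10)) {
    System.out.println(entry.getValue().utf8ToString() + ": " + entry.getCount());
  }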

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupSelector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupSelector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupSelector.java
new file mode 100644
index 0000000..5b8f77c
--- /dev/null
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupSelector.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search.grouping;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefHash;
+
+/**
+ * A GroupSelector implementation that groups via SortedDocValues
+ */
+public class TermGroupSelector extends GroupSelector<BytesRef> {
+
+  private final String field;
+  private final BytesRefHash values = new BytesRefHash();
+  private final Map<Integer, Integer> ordsToGroupIds = new HashMap<>();
+
+  private SortedDocValues docValues;
+  private int groupId;
+
+  private boolean secondPass;
+  private boolean includeEmpty;
+
+  /**
+   * Create a new TermGroupSelector
+   * @param field the SortedDocValues field to use for grouping
+   */
+  public TermGroupSelector(String field) {
+    this.field = field;
+  }
+
+  @Override
+  public void setNextReader(LeafReaderContext readerContext) throws IOException {
+    this.docValues = DocValues.getSorted(readerContext.reader(), field);
+    this.ordsToGroupIds.clear();
+    BytesRef scratch = new BytesRef();
+    for (int i = 0; i < values.size(); i++) {
+      values.get(i, scratch);
+      int ord = this.docValues.lookupTerm(scratch);
+      if (ord >= 0)
+        ordsToGroupIds.put(ord, i);
+    }
+  }
+
+  @Override
+  public State advanceTo(int doc) throws IOException {
+    if (this.docValues.advanceExact(doc) == false) {
+      groupId = -1;
+      return includeEmpty ? State.ACCEPT : State.SKIP;
+    }
+    int ord = docValues.ordValue();
+    if (ordsToGroupIds.containsKey(ord)) {
+      groupId = ordsToGroupIds.get(ord);
+      return State.ACCEPT;
+    }
+    if (secondPass)
+      return State.SKIP;
+    groupId = values.add(docValues.binaryValue());
+    ordsToGroupIds.put(ord, groupId);
+    return State.ACCEPT;
+  }
+
+  private BytesRef scratch = new BytesRef();
+
+  @Override
+  public BytesRef currentValue() {
+    if (groupId == -1)
+      return null;
+    values.get(groupId, scratch);
+    return scratch;
+  }
+
+  @Override
+  public BytesRef copyValue() {
+    if (groupId == -1)
+      return null;
+    return BytesRef.deepCopyOf(currentValue());
+  }
+
+  @Override
+  public void setGroups(Collection<SearchGroup<BytesRef>> searchGroups) {
+    this.values.clear();
+    this.values.reinit();
+    for (SearchGroup<BytesRef> sg : searchGroups) {
+      if (sg.groupValue == null)
+        includeEmpty = true;
+      else
+        this.values.add(sg.groupValue);
+    }
+    this.secondPass = true;
+  }
+}
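
A sketch of the refactored two-pass flow driven by this selector (illustrative;
the FirstPassGroupingCollector(GroupSelector, Sort, int) constructor is assumed
from the same refactoring, and `searcher`/`query` are given):

  TermGroupSelector selector = new TermGroupSelector("author");
  FirstPassGroupingCollector<BytesRef> firstPass =
      new FirstPassGroupingCollector<>(selector, Sort.RELEVANCE, 10);
  searcher.search(query, firstPass);
  Collection<SearchGroup<BytesRef>> top = firstPass.getTopGroups(0, true);
  if (top != null) { // null when no groups matched at the requested offset
    TopGroupsCollector<BytesRef> secondPass = new TopGroupsCollector<>(
        selector, top, Sort.RELEVANCE, Sort.RELEVANCE, 5, true, true, true);
    searcher.search(query, secondPass);
    TopGroups<BytesRef> groups = secondPass.getTopGroups(0);
  }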


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/TopGroupsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/TopGroupsCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/TopGroupsCollector.java
new file mode 100644
index 0000000..b6c71d3
--- /dev/null
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/TopGroupsCollector.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search.grouping;
+
+import java.util.Collection;
+import java.util.Objects;
+import java.util.function.Supplier;
+
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.TopDocsCollector;
+import org.apache.lucene.search.TopFieldCollector;
+import org.apache.lucene.search.TopScoreDocCollector;
+
+/**
+ * A second-pass collector that collects the TopDocs for each group, and
+ * returns them as a {@link TopGroups} object
+ *
+ * @param <T> the type of the group value
+ */
+public class TopGroupsCollector<T> extends SecondPassGroupingCollector<T> {
+
+  private final Sort groupSort;
+  private final Sort withinGroupSort;
+  private final int maxDocsPerGroup;
+
+  /**
+   * Create a new TopGroupsCollector
+   * @param groupSelector     the group selector used to define groups
+   * @param groups            the groups to collect TopDocs for
+   * @param groupSort         the order in which groups are returned
+   * @param withinGroupSort   the order in which documents are sorted in each group
+   * @param maxDocsPerGroup   the maximum number of docs to collect for each group
+   * @param getScores         if true, record the scores of all docs in each group
+   * @param getMaxScores      if true, record the maximum score for each group
+   * @param fillSortFields    if true, record the sort field values for all docs
+   */
+  public TopGroupsCollector(GroupSelector<T> groupSelector, Collection<SearchGroup<T>> groups, Sort groupSort, Sort withinGroupSort,
+                            int maxDocsPerGroup, boolean getScores, boolean getMaxScores, boolean fillSortFields) {
+    super(groupSelector, groups,
+        new TopDocsReducer<>(withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields));
+    this.groupSort = Objects.requireNonNull(groupSort);
+    this.withinGroupSort = Objects.requireNonNull(withinGroupSort);
+    this.maxDocsPerGroup = maxDocsPerGroup;
+
+  }
+
+  private static class TopDocsReducer<T> extends GroupReducer<T, TopDocsCollector<?>> {
+
+    private final Supplier<TopDocsCollector<?>> supplier;
+    private final boolean needsScores;
+
+    TopDocsReducer(Sort withinGroupSort,
+                   int maxDocsPerGroup, boolean getScores, boolean getMaxScores, boolean fillSortFields) {
+      this.needsScores = getScores || getMaxScores || withinGroupSort.needsScores();
+      this.supplier = withinGroupSort == Sort.RELEVANCE ?
+          () -> TopScoreDocCollector.create(maxDocsPerGroup) :
+          () -> TopFieldCollector.create(withinGroupSort, maxDocsPerGroup, fillSortFields, getScores, getMaxScores);
+    }
+
+    @Override
+    public boolean needsScores() {
+      return needsScores;
+    }
+
+    @Override
+    protected TopDocsCollector<?> newCollector() {
+      return supplier.get();
+    }
+  }
+
+  /**
+   * Get the TopGroups recorded by this collector
+   * @param withinGroupOffset the offset within each group to start collecting documents
+   */
+  public TopGroups<T> getTopGroups(int withinGroupOffset) {
+    @SuppressWarnings({"unchecked","rawtypes"})
+    final GroupDocs<T>[] groupDocsResult = (GroupDocs<T>[]) new GroupDocs[groups.size()];
+
+    int groupIDX = 0;
+    float maxScore = Float.MIN_VALUE;
+    for(SearchGroup<T> group : groups) {
+      TopDocsCollector<?> collector = (TopDocsCollector<?>) groupReducer.getCollector(group.groupValue);
+      final TopDocs topDocs = collector.topDocs(withinGroupOffset, maxDocsPerGroup);
+      groupDocsResult[groupIDX++] = new GroupDocs<>(Float.NaN,
+          topDocs.getMaxScore(),
+          topDocs.totalHits,
+          topDocs.scoreDocs,
+          group.groupValue,
+          group.sortValues);
+      maxScore = Math.max(maxScore, topDocs.getMaxScore());
+    }
+
+    return new TopGroups<>(groupSort.getSort(),
+        withinGroupSort.getSort(),
+        totalHitCount, totalGroupedHitCount, groupDocsResult,
+        maxScore);
+  }
+
+
+}
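
Consuming the result is unchanged; a sketch assuming the TopGroups<BytesRef>
`groups` produced by getTopGroups(0) above (field names are those of the
existing TopGroups/GroupDocs classes):

  for (GroupDocs<BytesRef> group : groups.groups) {
    String name = group.groupValue == null ? "<ungrouped>" : group.groupValue.utf8ToString();
    System.out.println(name + " (" + group.totalHits + " hits)");
    for (ScoreDoc hit : group.scoreDocs) {
      System.out.println("  doc=" + hit.doc + " score=" + hit.score);
    }
  }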

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/ValueSourceGroupSelector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/ValueSourceGroupSelector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/ValueSourceGroupSelector.java
new file mode 100644
index 0000000..2490160
--- /dev/null
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/ValueSourceGroupSelector.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search.grouping;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.queries.function.FunctionValues;
+import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.util.mutable.MutableValue;
+
+/**
+ * A GroupSelector that groups via a ValueSource
+ */
+public class ValueSourceGroupSelector extends GroupSelector<MutableValue> {
+
+  private final ValueSource valueSource;
+  private final Map<?, ?> context;
+
+  private Set<MutableValue> secondPassGroups;
+
+  /**
+   * Create a new ValueSourceGroupSelector
+   * @param valueSource the ValueSource to group by
+   * @param context     a context map for the ValueSource
+   */
+  public ValueSourceGroupSelector(ValueSource valueSource, Map<?, ?> context) {
+    this.valueSource = valueSource;
+    this.context = context;
+  }
+
+  private FunctionValues.ValueFiller filler;
+
+  @Override
+  public void setNextReader(LeafReaderContext readerContext) throws IOException {
+    FunctionValues values = valueSource.getValues(context, readerContext);
+    this.filler = values.getValueFiller();
+  }
+
+  @Override
+  public State advanceTo(int doc) throws IOException {
+    this.filler.fillValue(doc);
+    if (secondPassGroups != null) {
+      if (secondPassGroups.contains(filler.getValue()) == false)
+        return State.SKIP;
+    }
+    return State.ACCEPT;
+  }
+
+  @Override
+  public MutableValue currentValue() {
+    return filler.getValue();
+  }
+
+  @Override
+  public MutableValue copyValue() {
+    return filler.getValue().duplicate();
+  }
+
+  @Override
+  public void setGroups(Collection<SearchGroup<MutableValue>> searchGroups) {
+    secondPassGroups = new HashSet<>();
+    for (SearchGroup<MutableValue> group : searchGroups) {
+      secondPassGroups.add(group.groupValue);
+    }
+  }
+}
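
A sketch of wiring this selector up (illustrative; BytesRefFieldSource and
ValueSource.newContext are existing queries-module APIs, and the first-pass
constructor is assumed as above):

  ValueSource vs = new BytesRefFieldSource("author");
  Map<?, ?> context = ValueSource.newContext(searcher); // shared state for FunctionValues
  ValueSourceGroupSelector selector = new ValueSourceGroupSelector(vs, context);
  FirstPassGroupingCollector<MutableValue> firstPass =
      new FirstPassGroupingCollector<>(selector, Sort.RELEVANCE, 10);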

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionAllGroupHeadsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionAllGroupHeadsCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionAllGroupHeadsCollector.java
deleted file mode 100644
index f4d4668..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionAllGroupHeadsCollector.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search.grouping.function;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.queries.function.FunctionValues;
-import org.apache.lucene.queries.function.ValueSource;
-import org.apache.lucene.search.FieldComparator;
-import org.apache.lucene.search.LeafFieldComparator;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.grouping.AllGroupHeadsCollector;
-import org.apache.lucene.util.mutable.MutableValue;
-
-/**
- * An implementation of {@link AllGroupHeadsCollector} for retrieving the most relevant groups when grouping
- * by {@link ValueSource}.
- *
- * @lucene.experimental
- */
-public class FunctionAllGroupHeadsCollector extends AllGroupHeadsCollector<MutableValue> {
-
-  private final ValueSource groupBy;
-  private final Map<?, ?> vsContext;
-  private final Map<MutableValue, FunctionGroupHead> groups;
-  private final Sort sortWithinGroup;
-
-  private FunctionValues.ValueFiller filler;
-  private MutableValue mval;
-  private LeafReaderContext readerContext;
-  private Scorer scorer;
-
-  /**
-   * Constructs a {@link FunctionAllGroupHeadsCollector} instance.
-   *
-   * @param groupBy The {@link ValueSource} to group by
-   * @param vsContext The ValueSource context
-   * @param sortWithinGroup The sort within a group
-   */
-  public FunctionAllGroupHeadsCollector(ValueSource groupBy, Map<?, ?> vsContext, Sort sortWithinGroup) {
-    super(sortWithinGroup.getSort().length);
-    groups = new HashMap<>();
-    this.sortWithinGroup = sortWithinGroup;
-    this.groupBy = groupBy;
-    this.vsContext = vsContext;
-
-    final SortField[] sortFields = sortWithinGroup.getSort();
-    for (int i = 0; i < sortFields.length; i++) {
-      reversed[i] = sortFields[i].getReverse() ? -1 : 1;
-    }
-  }
-
-  @Override
-  protected void retrieveGroupHeadAndAddIfNotExist(int doc) throws IOException {
-    filler.fillValue(doc);
-    FunctionGroupHead groupHead = groups.get(mval);
-    if (groupHead == null) {
-      MutableValue groupValue = mval.duplicate();
-      groupHead = new FunctionGroupHead(groupValue, sortWithinGroup, doc);
-      groups.put(groupValue, groupHead);
-      temporalResult.stop = true;
-    } else {
-      temporalResult.stop = false;
-    }
-    this.temporalResult.groupHead = groupHead;
-  }
-
-  @Override
-  protected Collection<FunctionGroupHead> getCollectedGroupHeads() {
-    return groups.values();
-  }
-
-  @Override
-  public void setScorer(Scorer scorer) throws IOException {
-    this.scorer = scorer;
-    for (FunctionGroupHead groupHead : groups.values()) {
-      for (LeafFieldComparator comparator : groupHead.leafComparators) {
-        comparator.setScorer(scorer);
-      }
-    }
-  }
-
-  @Override
-  protected void doSetNextReader(LeafReaderContext context) throws IOException {
-    this.readerContext = context;
-    FunctionValues values = groupBy.getValues(vsContext, context);
-    filler = values.getValueFiller();
-    mval = filler.getValue();
-
-    for (FunctionGroupHead groupHead : groups.values()) {
-      for (int i = 0; i < groupHead.comparators.length; i++) {
-        groupHead.leafComparators[i] = groupHead.comparators[i].getLeafComparator(context);
-      }
-    }
-  }
-
-  /** Holds current head document for a single group.
-   *
-   * @lucene.experimental */
-  public class FunctionGroupHead extends AllGroupHeadsCollector.GroupHead<MutableValue> {
-
-    final FieldComparator<?>[] comparators;
-    final LeafFieldComparator[] leafComparators;
-
-    @SuppressWarnings({"unchecked","rawtypes"})
-    private FunctionGroupHead(MutableValue groupValue, Sort sort, int doc) throws IOException {
-      super(groupValue, doc + readerContext.docBase);
-      final SortField[] sortFields = sort.getSort();
-      comparators = new FieldComparator[sortFields.length];
-      leafComparators = new LeafFieldComparator[sortFields.length];
-      for (int i = 0; i < sortFields.length; i++) {
-        comparators[i] = sortFields[i].getComparator(1, i);
-        leafComparators[i] = comparators[i].getLeafComparator(readerContext);
-        leafComparators[i].setScorer(scorer);
-        leafComparators[i].copy(0, doc);
-        leafComparators[i].setBottom(0);
-      }
-    }
-
-    @Override
-    public int compare(int compIDX, int doc) throws IOException {
-      return leafComparators[compIDX].compareBottom(doc);
-    }
-
-    @Override
-    public void updateDocHead(int doc) throws IOException {
-      for (LeafFieldComparator comparator : leafComparators) {
-        comparator.copy(0, doc);
-        comparator.setBottom(0);
-      }
-      this.doc = doc + readerContext.docBase;
-    }
-  }
-
-  @Override
-  public boolean needsScores() {
-    return sortWithinGroup.needsScores();
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionAllGroupsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionAllGroupsCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionAllGroupsCollector.java
deleted file mode 100644
index 1609d4d..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionAllGroupsCollector.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search.grouping.function;
-
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.queries.function.FunctionValues;
-import org.apache.lucene.queries.function.ValueSource;
-import org.apache.lucene.search.grouping.AllGroupsCollector;
-import org.apache.lucene.util.mutable.MutableValue;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Map;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-/**
- * A collector that collects all groups that match the
- * query. Only the group value is collected, and the order
- * is undefined.  This collector does not determine
- * the most relevant document of a group.
- * <p>
- * Implementation detail: Uses {@link ValueSource} and {@link FunctionValues} to retrieve the
- * field values to group by.
- *
- * @lucene.experimental
- */
-public class FunctionAllGroupsCollector extends AllGroupsCollector<MutableValue> {
-
-  private final Map<?, ?> vsContext;
-  private final ValueSource groupBy;
-  private final SortedSet<MutableValue> groups = new TreeSet<>();
-
-  private FunctionValues.ValueFiller filler;
-  private MutableValue mval;
-
-  /**
-   * Constructs a {@link FunctionAllGroupsCollector} instance.
-   *
-   * @param groupBy The {@link ValueSource} to group by
-   * @param vsContext The ValueSource context
-   */
-  public FunctionAllGroupsCollector(ValueSource groupBy, Map<?, ?> vsContext) {
-    this.vsContext = vsContext;
-    this.groupBy = groupBy;
-  }
-
-  @Override
-  public Collection<MutableValue> getGroups() {
-    return groups;
-  }
-
-  @Override
-  public void collect(int doc) throws IOException {
-    filler.fillValue(doc);
-    if (!groups.contains(mval)) {
-      groups.add(mval.duplicate());
-    }
-  }
-
-  @Override
-  protected void doSetNextReader(LeafReaderContext context) throws IOException {
-    FunctionValues values = groupBy.getValues(vsContext, context);
-    filler = values.getValueFiller();
-    mval = filler.getValue();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionDistinctValuesCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionDistinctValuesCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionDistinctValuesCollector.java
deleted file mode 100644
index 69ead07..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionDistinctValuesCollector.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search.grouping.function;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.queries.function.FunctionValues;
-import org.apache.lucene.queries.function.ValueSource;
-import org.apache.lucene.search.grouping.DistinctValuesCollector;
-import org.apache.lucene.search.grouping.SearchGroup;
-import org.apache.lucene.util.mutable.MutableValue;
-
-/**
- * Function based implementation of {@link DistinctValuesCollector}.
- *
- * @lucene.experimental
- */
-public class FunctionDistinctValuesCollector extends DistinctValuesCollector<MutableValue> {
-
-  private final Map<?, ?> vsContext;
-  private final ValueSource groupSource;
-  private final ValueSource countSource;
-  private final Map<MutableValue, GroupCount<MutableValue>> groupMap;
-
-  private FunctionValues.ValueFiller groupFiller;
-  private FunctionValues.ValueFiller countFiller;
-  private MutableValue groupMval;
-  private MutableValue countMval;
-
-  public FunctionDistinctValuesCollector(Map<?, ?> vsContext, ValueSource groupSource, ValueSource countSource, Collection<SearchGroup<MutableValue>> groups) {
-    this.vsContext = vsContext;
-    this.groupSource = groupSource;
-    this.countSource = countSource;
-    groupMap = new LinkedHashMap<>();
-    for (SearchGroup<MutableValue> group : groups) {
-      groupMap.put(group.groupValue, new GroupCount<>(group.groupValue));
-    }
-  }
-
-  @Override
-  public List<GroupCount<MutableValue>> getGroups() {
-    return new ArrayList<>(groupMap.values());
-  }
-
-  @Override
-  public void collect(int doc) throws IOException {
-    groupFiller.fillValue(doc);
-    GroupCount<MutableValue> groupCount = groupMap.get(groupMval);
-    if (groupCount != null) {
-      countFiller.fillValue(doc);
-      groupCount.uniqueValues.add(countMval.duplicate());
-    }
-  }
-
-  @Override
-  protected void doSetNextReader(LeafReaderContext context) throws IOException {
-    FunctionValues values = groupSource.getValues(vsContext, context);
-    groupFiller = values.getValueFiller();
-    groupMval = groupFiller.getValue();
-    values = countSource.getValues(vsContext, context);
-    countFiller = values.getValueFiller();
-    countMval = countFiller.getValue();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionFirstPassGroupingCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionFirstPassGroupingCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionFirstPassGroupingCollector.java
deleted file mode 100644
index 85376e6..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionFirstPassGroupingCollector.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search.grouping.function;
-
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.queries.function.FunctionValues;
-import org.apache.lucene.queries.function.ValueSource;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.grouping.FirstPassGroupingCollector;
-import org.apache.lucene.util.mutable.MutableValue;
-
-import java.io.IOException;
-import java.util.Map;
-
-/**
- * Concrete implementation of {@link FirstPassGroupingCollector} that groups based on
- * {@link ValueSource} instances.
- *
- * @lucene.experimental
- */
-public class FunctionFirstPassGroupingCollector extends FirstPassGroupingCollector<MutableValue> {
-
-  private final ValueSource groupByVS;
-  private final Map<?, ?> vsContext;
-
-  private FunctionValues.ValueFiller filler;
-  private MutableValue mval;
-
-  /**
-   * Creates a first pass collector.
-   *
-   * @param groupByVS  The {@link ValueSource} instance to group by
-   * @param vsContext  The ValueSource context
-   * @param groupSort  The {@link Sort} used to sort the
-   *                   groups.  The top sorted document within each group
-   *                   according to groupSort, determines how that group
-   *                   sorts against other groups.  This must be non-null,
-   *                   ie, if you want to groupSort by relevance use
-   *                   Sort.RELEVANCE.
-   * @param topNGroups How many top groups to keep.
-   * @throws IOException When I/O related errors occur
-   */
-  public FunctionFirstPassGroupingCollector(ValueSource groupByVS, Map<?, ?> vsContext, Sort groupSort, int topNGroups) throws IOException {
-    super(groupSort, topNGroups);
-    this.groupByVS = groupByVS;
-    this.vsContext = vsContext;
-  }
-
-  @Override
-  protected MutableValue getDocGroupValue(int doc) throws IOException {
-    filler.fillValue(doc);
-    return mval;
-  }
-
-  @Override
-  protected MutableValue copyDocGroupValue(MutableValue groupValue, MutableValue reuse) {
-    if (reuse != null) {
-      reuse.copy(groupValue);
-      return reuse;
-    }
-    return groupValue.duplicate();
-  }
-
-  @Override
-  protected void doSetNextReader(LeafReaderContext readerContext) throws IOException {
-    super.doSetNextReader(readerContext);
-    FunctionValues values = groupByVS.getValues(vsContext, readerContext);
-    filler = values.getValueFiller();
-    mval = filler.getValue();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionGrouper.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionGrouper.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionGrouper.java
deleted file mode 100644
index 5204dc2..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionGrouper.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.lucene.search.grouping.function;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Map;
-
-import org.apache.lucene.queries.function.ValueSource;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.grouping.AllGroupHeadsCollector;
-import org.apache.lucene.search.grouping.AllGroupsCollector;
-import org.apache.lucene.search.grouping.FirstPassGroupingCollector;
-import org.apache.lucene.search.grouping.SecondPassGroupingCollector;
-import org.apache.lucene.search.grouping.Grouper;
-import org.apache.lucene.search.grouping.SearchGroup;
-import org.apache.lucene.util.mutable.MutableValue;
-
-/**
- * Collector factory for grouping by ValueSource
- */
-public class FunctionGrouper extends Grouper<MutableValue> {
-
-  private final ValueSource valueSource;
-  private final Map<?, ?> context;
-
-  /**
-   * Create a Grouper for the provided ValueSource and context
-   */
-  public FunctionGrouper(ValueSource valueSource, Map<?, ?> context) {
-    this.valueSource = valueSource;
-    this.context = context;
-  }
-
-  @Override
-  public FirstPassGroupingCollector<MutableValue> getFirstPassCollector(Sort sort, int count) throws IOException {
-    return new FunctionFirstPassGroupingCollector(valueSource, context, sort, count);
-  }
-
-  @Override
-  public AllGroupHeadsCollector<MutableValue> getGroupHeadsCollector(Sort sort) {
-    return new FunctionAllGroupHeadsCollector(valueSource, context, sort);
-  }
-
-  @Override
-  public AllGroupsCollector<MutableValue> getAllGroupsCollector() {
-    return new FunctionAllGroupsCollector(valueSource, context);
-  }
-
-  @Override
-  public SecondPassGroupingCollector<MutableValue> getSecondPassCollector(Collection<SearchGroup<MutableValue>> searchGroups, Sort groupSort, Sort withinGroupSort, int maxDocsPerGroup, boolean getScores, boolean getMaxScores, boolean fillSortFields) throws IOException {
-    return new FunctionSecondPassGroupingCollector(searchGroups, groupSort, withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields, valueSource, context);
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionSecondPassGroupingCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionSecondPassGroupingCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionSecondPassGroupingCollector.java
deleted file mode 100644
index 45f2b37..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionSecondPassGroupingCollector.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search.grouping.function;
-
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.queries.function.FunctionValues;
-import org.apache.lucene.queries.function.ValueSource;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.grouping.SecondPassGroupingCollector;
-import org.apache.lucene.search.grouping.SearchGroup;
-import org.apache.lucene.util.mutable.MutableValue;
-import org.apache.lucene.search.grouping.TopGroups; //javadoc
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Map;
-
-/**
- * Concrete implementation of {@link SecondPassGroupingCollector} that groups based on
- * {@link ValueSource} instances.
- *
- * @lucene.experimental
- */
-public class FunctionSecondPassGroupingCollector extends SecondPassGroupingCollector<MutableValue> {
-
-  private final ValueSource groupByVS;
-  private final Map<?, ?> vsContext;
-
-  private FunctionValues.ValueFiller filler;
-  private MutableValue mval;
-
-  /**
-   * Constructs a {@link FunctionSecondPassGroupingCollector} instance.
-   *
-   * @param searchGroups The {@link SearchGroup} instances collected during the first phase.
-   * @param groupSort The group sort
-   * @param withinGroupSort The sort inside a group
-   * @param maxDocsPerGroup The maximum number of documents to collect inside a group
-   * @param getScores Whether to include the scores
-   * @param getMaxScores Whether to include the maximum score
-   * @param fillSortFields Whether to fill the sort values in {@link TopGroups#withinGroupSort}
-   * @param groupByVS The {@link ValueSource} to group by
-   * @param vsContext The value source context
-   * @throws IOException IOException When I/O related errors occur
-   */
-  public FunctionSecondPassGroupingCollector(Collection<SearchGroup<MutableValue>> searchGroups, Sort groupSort, Sort withinGroupSort, int maxDocsPerGroup, boolean getScores, boolean getMaxScores, boolean fillSortFields, ValueSource groupByVS, Map<?, ?> vsContext) throws IOException {
-    super(searchGroups, groupSort, withinGroupSort, maxDocsPerGroup, getScores, getMaxScores, fillSortFields);
-    this.groupByVS = groupByVS;
-    this.vsContext = vsContext;
-  }
-
-  @Override
-  protected SearchGroupDocs<MutableValue> retrieveGroup(int doc) throws IOException {
-    filler.fillValue(doc);
-    return groupMap.get(mval);
-  }
-
-  @Override
-  protected void doSetNextReader(LeafReaderContext readerContext) throws IOException {
-    super.doSetNextReader(readerContext);
-    FunctionValues values = groupByVS.getValues(vsContext, readerContext);
-    filler = values.getValueFiller();
-    mval = filler.getValue();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/package-info.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/package-info.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/package-info.java
deleted file mode 100644
index 73588ce..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/package-info.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Support for grouping by {@link org.apache.lucene.queries.function.ValueSource}.
- */
-package org.apache.lucene.search.grouping.function;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/package-info.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/package-info.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/package-info.java
index 824a98e..7e3745e 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/package-info.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/package-info.java
@@ -62,9 +62,9 @@
  * </ul>
  * 
  * <p>The implementation is two-pass: the first pass ({@link
- *   org.apache.lucene.search.grouping.term.TermFirstPassGroupingCollector})
+ *   org.apache.lucene.search.grouping.FirstPassGroupingCollector})
  *   gathers the top groups, and the second pass ({@link
- *   org.apache.lucene.search.grouping.term.TermSecondPassGroupingCollector})
+ *   org.apache.lucene.search.grouping.SecondPassGroupingCollector})
  *   gathers documents within those groups.  If the search is costly to
  *   run you may want to use the {@link
  *   org.apache.lucene.search.CachingCollector} class, which
@@ -73,18 +73,17 @@
  *   hold all hits.  Results are returned as a {@link
  *   org.apache.lucene.search.grouping.TopGroups} instance.</p>
  * 
- * <p>
- *   This module abstracts away what defines group and how it is collected. All grouping collectors
- *   are abstract and have currently term based implementations. One can implement
- *   collectors that for example group on multiple fields.
- * </p>
+ * <p>Groups are defined by {@link org.apache.lucene.search.grouping.GroupSelector}
+ *   implementations:</p>
+ *   <ul>
+ *     <li>{@link org.apache.lucene.search.grouping.TermGroupSelector} groups based on
+ *     the value of a {@link org.apache.lucene.index.SortedDocValues} field</li>
+ *     <li>{@link org.apache.lucene.search.grouping.ValueSourceGroupSelector} groups based on
+ *     the value of a {@link org.apache.lucene.queries.function.ValueSource}</li>
+ *   </ul>
  * 
  * <p>Known limitations:</p>
  * <ul>
- *   <li> For the two-pass grouping search, the group field must be a
- *     indexed as a {@link org.apache.lucene.document.SortedDocValuesField}).
- *   <li> Although Solr support grouping by function and this module has abstraction of what a group is, there are currently only
- *     implementations for grouping based on terms.
  *   <li> Sharding is not directly supported, though is not too
  *     difficult, if you can merge the top groups and top documents per
  *     group yourself.
@@ -174,14 +173,15 @@
  * have to separately retrieve it (for example using stored
  * fields, <code>FieldCache</code>, etc.).
  * 
- * <p>Another collector is the <code>TermAllGroupHeadsCollector</code> that can be used to retrieve all most relevant
+ * <p>Another collector is the <code>AllGroupHeadsCollector</code> that can be used to retrieve the most relevant
  *    documents per group. Also known as group heads. This can be useful in situations when one wants to compute group
  *    based facets / statistics on the complete query result. The collector can be executed during the first or second
 *    phase. This collector can also be used with the <code>GroupingSearch</code> convenience utility, but if one only
  *    wants to compute the most relevant documents per group it is better to just use the collector as done here below.</p>
  * 
  * <pre class="prettyprint">
- *   AbstractAllGroupHeadsCollector c = TermAllGroupHeadsCollector.create(groupField, sortWithinGroup);
+ *   TermGroupSelector grouper = new TermGroupSelector(groupField);
+ *   AllGroupHeadsCollector c = AllGroupHeadsCollector.newCollector(grouper, sortWithinGroup);
  *   s.search(new TermQuery(new Term("content", searchTerm)), c);
  *   // Return all group heads as int array
  *   int[] groupHeadsArray = c.retrieveGroupHeads()
@@ -189,12 +189,6 @@
  *   int maxDoc = s.maxDoc();
  *   FixedBitSet groupHeadsBitSet = c.retrieveGroupHeads(maxDoc)
  * </pre>
- * 
- * <p>For each of the above collector types there is also a variant that works with <code>ValueSource</code> instead of
- *    of fields. Concretely this means that these variants can work with functions. These variants are slower than
- *    there term based counter parts. These implementations are located in the
- *    <code>org.apache.lucene.search.grouping.function</code> package, but can also be used with the
- *   <code>GroupingSearch</code> convenience utility
- * </p>
+ *
  */
 package org.apache.lucene.search.grouping;
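
For completeness, the same flow through the GroupingSearch convenience utility
mentioned above, as a sketch (setter names follow the existing GroupingSearch
class; `searcher` and `query` are given):

  GroupingSearch groupingSearch = new GroupingSearch("author");
  groupingSearch.setGroupSort(Sort.RELEVANCE);
  groupingSearch.setGroupDocsLimit(5); // top documents kept per group
  TopGroups<BytesRef> result = groupingSearch.search(searcher, query, 0, 10);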

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermAllGroupHeadsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermAllGroupHeadsCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermAllGroupHeadsCollector.java
deleted file mode 100644
index 54e2399..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermAllGroupHeadsCollector.java
+++ /dev/null
@@ -1,767 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search.grouping.term;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.lucene.index.DocValues;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.search.FieldComparator;
-import org.apache.lucene.search.LeafFieldComparator;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.grouping.AllGroupHeadsCollector;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.SentinelIntSet;
-
-/**
- * A base implementation of {@link AllGroupHeadsCollector} for retrieving the most relevant groups when grouping
- * on a string based group field. More specifically this all concrete implementations of this base implementation
- * use {@link SortedDocValues}.
- *
- * @lucene.experimental
- */
-public abstract class TermAllGroupHeadsCollector extends AllGroupHeadsCollector<BytesRef> {
-
-  private static final int DEFAULT_INITIAL_SIZE = 128;
-
-  final String groupField;
-
-  SortedDocValues groupIndex;
-  LeafReaderContext readerContext;
-
-  protected TermAllGroupHeadsCollector(String groupField, int numberOfSorts) {
-    super(numberOfSorts);
-    this.groupField = groupField;
-  }
-
-  /**
-   * Creates an <code>AbstractAllGroupHeadsCollector</code> instance based on the supplied arguments.
-   * This factory method decides which implementation is best suited.
-   *
-   * Delegates to {@link #create(String, org.apache.lucene.search.Sort, int)} with an initialSize of 128.
-   *
-   * @param groupField      The field to group by
-   * @param sortWithinGroup The sort within each group
-   * @return an <code>AbstractAllGroupHeadsCollector</code> instance based on the supplied arguments
-   */
-  public static AllGroupHeadsCollector<BytesRef> create(String groupField, Sort sortWithinGroup) {
-    return create(groupField, sortWithinGroup, DEFAULT_INITIAL_SIZE);
-  }
-
-  /**
-   * Creates an <code>AbstractAllGroupHeadsCollector</code> instance based on the supplied arguments.
-   * This factory method decides which implementation is best suited.
-   *
-   * @param groupField      The field to group by
-   * @param sortWithinGroup The sort within each group
-   * @param initialSize The initial allocation size of the internal int set and group list which should roughly match
-   *                    the total number of expected unique groups. Be aware that the heap usage is
-   *                    4 bytes * initialSize.
-   * @return an <code>AbstractAllGroupHeadsCollector</code> instance based on the supplied arguments
-   */
-  public static AllGroupHeadsCollector<BytesRef> create(String groupField, Sort sortWithinGroup, int initialSize) {
-    boolean sortAllScore = true;
-    boolean sortAllFieldValue = true;
-
-    for (SortField sortField : sortWithinGroup.getSort()) {
-      if (sortField.getType() == SortField.Type.SCORE) {
-        sortAllFieldValue = false;
-      } else if (needGeneralImpl(sortField)) {
-        return new GeneralAllGroupHeadsCollector(groupField, sortWithinGroup);
-      } else {
-        sortAllScore = false;
-      }
-    }
-
-    if (sortAllScore) {
-      return new ScoreAllGroupHeadsCollector(groupField, sortWithinGroup, initialSize);
-    } else if (sortAllFieldValue) {
-      return new OrdAllGroupHeadsCollector(groupField, sortWithinGroup, initialSize);
-    } else {
-      return new OrdScoreAllGroupHeadsCollector(groupField, sortWithinGroup, initialSize);
-    }
-  }
-
-  // Returns whether a sort field needs the general impl.
-  private static boolean needGeneralImpl(SortField sortField) {
-    SortField.Type sortType = sortField.getType();
-    // Note (MvG): We can also make an optimized impl when sorting is SortField.DOC
-    return sortType != SortField.Type.STRING_VAL && sortType != SortField.Type.STRING && sortType != SortField.Type.SCORE;
-  }
-
-  // A general impl that works for any group sort.
-  static class GeneralAllGroupHeadsCollector extends TermAllGroupHeadsCollector {
-
-    private final Sort sortWithinGroup;
-    private final Map<BytesRef, GroupHead> groups;
-
-    Scorer scorer;
-
-    GeneralAllGroupHeadsCollector(String groupField, Sort sortWithinGroup) {
-      super(groupField, sortWithinGroup.getSort().length);
-      this.sortWithinGroup = sortWithinGroup;
-      groups = new HashMap<>();
-
-      final SortField[] sortFields = sortWithinGroup.getSort();
-      for (int i = 0; i < sortFields.length; i++) {
-        reversed[i] = sortFields[i].getReverse() ? -1 : 1;
-      }
-    }
-
-    protected int getOrdForDoc(int doc) throws IOException {
-      int curDocID = groupIndex.docID();
-      if (curDocID < doc) {
-        curDocID = groupIndex.advance(doc);
-      }
-
-      if (curDocID == doc) {
-        return groupIndex.ordValue();
-      } else {
-        return -1;
-      }
-    }
-
-    @Override
-    protected void retrieveGroupHeadAndAddIfNotExist(int doc) throws IOException {
-      int ord = getOrdForDoc(doc);
-
-      BytesRef groupValue;
-      if (ord == -1) {
-        groupValue = null;
-      } else {
-        groupValue = groupIndex.lookupOrd(ord);
-      }
-        
-      GroupHead groupHead = groups.get(groupValue);
-      if (groupHead == null) {
-        groupValue = groupValue == null ? null : BytesRef.deepCopyOf(groupValue);
-        groupHead = new GroupHead(groupValue, sortWithinGroup, doc);
-        groups.put(groupValue, groupHead);
-        temporalResult.stop = true;
-      } else {
-        temporalResult.stop = false;
-      }
-      temporalResult.groupHead = groupHead;
-    }
-
-    @Override
-    protected Collection<GroupHead> getCollectedGroupHeads() {
-      return groups.values();
-    }
-
-    @Override
-    protected void doSetNextReader(LeafReaderContext context) throws IOException {
-      this.readerContext = context;
-      groupIndex = DocValues.getSorted(context.reader(), groupField);
-
-      for (GroupHead groupHead : groups.values()) {
-        for (int i = 0; i < groupHead.comparators.length; i++) {
-          groupHead.leafComparators[i] = groupHead.comparators[i].getLeafComparator(context);
-        }
-      }
-    }
-
-    @Override
-    public boolean needsScores() {
-      return sortWithinGroup.needsScores();
-    }
-
-    @Override
-    public void setScorer(Scorer scorer) throws IOException {
-      this.scorer = scorer;
-      for (GroupHead groupHead : groups.values()) {
-        for (LeafFieldComparator comparator : groupHead.leafComparators) {
-          comparator.setScorer(scorer);
-        }
-      }
-    }
-
-    class GroupHead extends AllGroupHeadsCollector.GroupHead<BytesRef> {
-
-      @SuppressWarnings({"unchecked", "rawtypes"})
-      final FieldComparator[] comparators;
-      
-      final LeafFieldComparator[] leafComparators;
-
-      @SuppressWarnings({"unchecked", "rawtypes"})
-      GroupHead(BytesRef groupValue, Sort sort, int doc) throws IOException {
-        super(groupValue, doc + readerContext.docBase);
-        final SortField[] sortFields = sort.getSort();
-        comparators = new FieldComparator[sortFields.length];
-        leafComparators = new LeafFieldComparator[sortFields.length];
-        for (int i = 0; i < sortFields.length; i++) {
-          comparators[i] = sortFields[i].getComparator(1, i);
-          leafComparators[i] = comparators[i].getLeafComparator(readerContext);
-          leafComparators[i].setScorer(scorer);
-          leafComparators[i].copy(0, doc);
-          leafComparators[i].setBottom(0);
-        }
-      }
-
-      @Override
-      public int compare(int compIDX, int doc) throws IOException {
-        return leafComparators[compIDX].compareBottom(doc);
-      }
-
-      @Override
-      public void updateDocHead(int doc) throws IOException {
-        for (LeafFieldComparator comparator : leafComparators) {
-          comparator.copy(0, doc);
-          comparator.setBottom(0);
-        }
-        this.doc = doc + readerContext.docBase;
-      }
-    }
-  }
-
-
-  // AbstractAllGroupHeadsCollector optimized for ord fields and scores.
-  static class OrdScoreAllGroupHeadsCollector extends TermAllGroupHeadsCollector {
-
-    private final SentinelIntSet ordSet;
-    private final List<GroupHead> collectedGroups;
-    final SortField[] fields;
-
-    SortedDocValues[] sortsIndex;
-    Scorer scorer;
-    private GroupHead[] segmentGroupHeads;
-
-    OrdScoreAllGroupHeadsCollector(String groupField, Sort sortWithinGroup, int initialSize) {
-      super(groupField, sortWithinGroup.getSort().length);
-      ordSet = new SentinelIntSet(initialSize, -2);
-      collectedGroups = new ArrayList<>(initialSize);
-
-      final SortField[] sortFields = sortWithinGroup.getSort();
-      fields = new SortField[sortFields.length];
-      sortsIndex = new SortedDocValues[sortFields.length];
-      for (int i = 0; i < sortFields.length; i++) {
-        reversed[i] = sortFields[i].getReverse() ? -1 : 1;
-        fields[i] = sortFields[i];
-      }
-    }
-
-    @Override
-    protected Collection<GroupHead> getCollectedGroupHeads() {
-      return collectedGroups;
-    }
-
-    @Override
-    public boolean needsScores() {
-      return true;
-    }
-
-    @Override
-    public void setScorer(Scorer scorer) throws IOException {
-      this.scorer = scorer;
-    }
-
-    private int getOrdForDoc(int doc) throws IOException {
-      int curDocID = groupIndex.docID();
-      if (curDocID < doc) {
-        curDocID = groupIndex.advance(doc);
-      }
-
-      if (curDocID == doc) {
-        return groupIndex.ordValue();
-      } else {
-        return -1;
-      }
-    }
-
-    @Override
-    protected void retrieveGroupHeadAndAddIfNotExist(int doc) throws IOException {
-      int key = getOrdForDoc(doc);
-      GroupHead groupHead;
-      if (!ordSet.exists(key)) {
-        ordSet.put(key);
-        final BytesRef term;
-        if (key == -1) {
-          term = null;
-        } else {
-          term = BytesRef.deepCopyOf(groupIndex.lookupOrd(key));
-        }
-        groupHead = new GroupHead(doc, term);
-        collectedGroups.add(groupHead);
-        segmentGroupHeads[key+1] = groupHead;
-        temporalResult.stop = true;
-      } else {
-        temporalResult.stop = false;
-        groupHead = segmentGroupHeads[key+1];
-      }
-      temporalResult.groupHead = groupHead;
-    }
-
-    @Override
-    protected void doSetNextReader(LeafReaderContext context) throws IOException {
-      this.readerContext = context;
-      groupIndex = DocValues.getSorted(context.reader(), groupField);
-      for (int i = 0; i < fields.length; i++) {
-        if (fields[i].getType() == SortField.Type.SCORE) {
-          continue;
-        }
-
-        sortsIndex[i] = DocValues.getSorted(context.reader(), fields[i].getField());
-      }
-
-      // Clear ordSet and fill it with previously encountered groups that can occur in the current segment.
-      ordSet.clear();
-      segmentGroupHeads = new GroupHead[groupIndex.getValueCount()+1];
-      for (GroupHead collectedGroup : collectedGroups) {
-        int ord;
-        if (collectedGroup.groupValue == null) {
-          ord = -1;
-        } else {
-          ord = groupIndex.lookupTerm(collectedGroup.groupValue);
-        }
-        if (collectedGroup.groupValue == null || ord >= 0) {
-          ordSet.put(ord);
-          segmentGroupHeads[ord+1] = collectedGroup;
-
-          for (int i = 0; i < sortsIndex.length; i++) {
-            if (fields[i].getType() == SortField.Type.SCORE) {
-              continue;
-            }
-            int sortOrd;
-            if (collectedGroup.sortValues[i] == null) {
-              sortOrd = -1;
-            } else {
-              sortOrd = sortsIndex[i].lookupTerm(collectedGroup.sortValues[i].get());
-            }
-            collectedGroup.sortOrds[i] = sortOrd;
-          }
-        }
-      }
-    }
-
-    void setDoc(int docID) throws IOException {
-      for (int i = 0; i < sortsIndex.length; i++) {
-        SortedDocValues values = sortsIndex[i];
-        if (values != null && docID > values.docID()) {
-          values.advance(docID);
-        }
-      }
-    }
-
-    class GroupHead extends AllGroupHeadsCollector.GroupHead<BytesRef> {
-
-      BytesRefBuilder[] sortValues;
-      int[] sortOrds;
-      float[] scores;
-
-      GroupHead(int doc, BytesRef groupValue) throws IOException {
-        super(groupValue, doc + readerContext.docBase);
-        sortValues = new BytesRefBuilder[sortsIndex.length];
-        sortOrds = new int[sortsIndex.length];
-        scores = new float[sortsIndex.length];
-        setDoc(doc);
-        for (int i = 0; i < sortsIndex.length; i++) {
-          if (fields[i].getType() == SortField.Type.SCORE) {
-            scores[i] = scorer.score();
-          } else {
-            if (doc == sortsIndex[i].docID()) {
-              sortOrds[i] = sortsIndex[i].ordValue();
-            } else {
-              sortOrds[i] = -1;
-            }
-            sortValues[i] = new BytesRefBuilder();
-            if (sortOrds[i] != -1) {
-              sortValues[i].copyBytes(sortsIndex[i].binaryValue());
-            }
-          }
-        }
-      }
-
-      @Override
-      public int compare(int compIDX, int doc) throws IOException {
-        if (fields[compIDX].getType() == SortField.Type.SCORE) {
-          float score = scorer.score();
-          if (scores[compIDX] < score) {
-            return 1;
-          } else if (scores[compIDX] > score) {
-            return -1;
-          }
-          return 0;
-        } else {
-          if (sortsIndex[compIDX].docID() < doc) {
-            sortsIndex[compIDX].advance(doc);
-          }
-          if (sortOrds[compIDX] < 0) {
-            // The current segment doesn't contain the sort value we encountered before. Therefore the ord is negative.
-            final BytesRef term;
-            if (sortsIndex[compIDX].docID() == doc) {
-              term = sortsIndex[compIDX].binaryValue();
-            } else {
-              term = new BytesRef(BytesRef.EMPTY_BYTES);
-            }
-            return sortValues[compIDX].get().compareTo(term);
-          } else {
-            int ord;
-            if (sortsIndex[compIDX].docID() == doc) {
-              ord = sortsIndex[compIDX].ordValue();
-            } else {
-              ord = -1;
-            }
-            return sortOrds[compIDX] - ord;
-          }
-        }
-      }
-
-      @Override
-      public void updateDocHead(int doc) throws IOException {
-        setDoc(doc);
-        for (int i = 0; i < sortsIndex.length; i++) {
-          if (fields[i].getType() == SortField.Type.SCORE) {
-            scores[i] = scorer.score();
-          } else {
-            if (sortsIndex[i].docID() == doc) {
-              sortOrds[i] = sortsIndex[i].ordValue();
-              sortValues[i].copyBytes(sortsIndex[i].binaryValue());
-            } else {
-              sortOrds[i] = -1;
-              sortValues[i].clear();
-            }
-          }
-        }
-        this.doc = doc + readerContext.docBase;
-      }
-    }
-  }
-
-
-  // AbstractAllGroupHeadsCollector optimized for ord fields.
-  static class OrdAllGroupHeadsCollector extends TermAllGroupHeadsCollector {
-
-    private final SentinelIntSet ordSet;
-    private final List<GroupHead> collectedGroups;
-    private final SortField[] fields;
-
-    SortedDocValues[] sortsIndex;
-    GroupHead[] segmentGroupHeads;
-
-    OrdAllGroupHeadsCollector(String groupField, Sort sortWithinGroup, int initialSize) {
-      super(groupField, sortWithinGroup.getSort().length);
-      ordSet = new SentinelIntSet(initialSize, -2);
-      collectedGroups = new ArrayList<>(initialSize);
-
-      final SortField[] sortFields = sortWithinGroup.getSort();
-      fields = new SortField[sortFields.length];
-      sortsIndex = new SortedDocValues[sortFields.length];
-      for (int i = 0; i < sortFields.length; i++) {
-        reversed[i] = sortFields[i].getReverse() ? -1 : 1;
-        fields[i] = sortFields[i];
-      }
-    }
-
-    @Override
-    protected Collection<GroupHead> getCollectedGroupHeads() {
-      return collectedGroups;
-    }
-
-    @Override
-    public boolean needsScores() {
-      return false;
-    }
-
-    @Override
-    public void setScorer(Scorer scorer) throws IOException {
-    }
-
-    @Override
-    protected void retrieveGroupHeadAndAddIfNotExist(int doc) throws IOException {
-      if (doc > groupIndex.docID()) {
-        groupIndex.advance(doc);
-      }
-      
-      int key;
-      if (doc == groupIndex.docID()) {
-        key = groupIndex.ordValue();
-      } else {
-        key = -1;
-      }
-      
-      GroupHead groupHead;
-      if (!ordSet.exists(key)) {
-        ordSet.put(key);
-        final BytesRef term;
-        if (key == -1) {
-          term = null;
-        } else {
-          term = BytesRef.deepCopyOf(groupIndex.lookupOrd(key));
-        }
-        groupHead = new GroupHead(doc, term);
-        collectedGroups.add(groupHead);
-        segmentGroupHeads[key+1] = groupHead;
-        temporalResult.stop = true;
-      } else {
-        temporalResult.stop = false;
-        groupHead = segmentGroupHeads[key+1];
-      }
-      temporalResult.groupHead = groupHead;
-    }
-
-    @Override
-    protected void doSetNextReader(LeafReaderContext context) throws IOException {
-      this.readerContext = context;
-      groupIndex = DocValues.getSorted(context.reader(), groupField);
-      for (int i = 0; i < fields.length; i++) {
-        sortsIndex[i] = DocValues.getSorted(context.reader(), fields[i].getField());
-      }
-
-      // Clear ordSet and fill it with previously encountered groups that can occur in the current segment.
-      ordSet.clear();
-      segmentGroupHeads = new GroupHead[groupIndex.getValueCount()+1];
-      for (GroupHead collectedGroup : collectedGroups) {
-        int groupOrd;
-        if (collectedGroup.groupValue == null) {
-          groupOrd = -1;
-        } else {
-          groupOrd = groupIndex.lookupTerm(collectedGroup.groupValue);
-        }
-        if (collectedGroup.groupValue == null || groupOrd >= 0) {
-          ordSet.put(groupOrd);
-          segmentGroupHeads[groupOrd+1] = collectedGroup;
-
-          for (int i = 0; i < sortsIndex.length; i++) {
-            int sortOrd;
-            if (collectedGroup.sortOrds[i] == -1) {
-              sortOrd = -1;
-            } else {
-              sortOrd = sortsIndex[i].lookupTerm(collectedGroup.sortValues[i].get());
-            }
-            collectedGroup.sortOrds[i] = sortOrd;
-          }
-        }
-      }
-    }
-
-    void setDoc(int docID) throws IOException {
-      for (int i = 0; i < sortsIndex.length; i++) {
-        SortedDocValues values = sortsIndex[i];
-        if (docID > values.docID()) {
-          values.advance(docID);
-        }
-      }
-    }
-
-    class GroupHead extends AllGroupHeadsCollector.GroupHead<BytesRef> {
-
-      BytesRefBuilder[] sortValues;
-      int[] sortOrds;
-
-      GroupHead(int doc, BytesRef groupValue) throws IOException {
-        super(groupValue, doc + readerContext.docBase);
-        sortValues = new BytesRefBuilder[sortsIndex.length];
-        sortOrds = new int[sortsIndex.length];
-        setDoc(doc);
-        for (int i = 0; i < sortsIndex.length; i++) {
-          if (doc == sortsIndex[i].docID()) {
-            sortOrds[i] = sortsIndex[i].ordValue();
-          } else {
-            sortOrds[i] = -1;
-          }
-          sortValues[i] = new BytesRefBuilder();
-          if (sortOrds[i] != -1) {
-            sortValues[i].copyBytes(sortsIndex[i].binaryValue());
-          }
-        }
-      }
-
-      @Override
-      public int compare(int compIDX, int doc) throws IOException {
-        if (sortsIndex[compIDX].docID() < doc) {
-          sortsIndex[compIDX].advance(doc);
-        }
-        if (sortOrds[compIDX] < 0) {
-          // The current segment doesn't contain the sort value we encountered before. Therefore the ord is negative.
-          final BytesRef term;
-          if (sortsIndex[compIDX].docID() == doc) {
-            term = sortsIndex[compIDX].binaryValue();
-          } else {
-            term = new BytesRef(BytesRef.EMPTY_BYTES);
-          }
-          return sortValues[compIDX].get().compareTo(term);
-        } else {
-          int ord;
-          if (sortsIndex[compIDX].docID() == doc) {
-            ord = sortsIndex[compIDX].ordValue();
-          } else {
-            ord = -1;
-          }
-          return sortOrds[compIDX] - ord;
-        }
-      }
-
-      @Override
-      public void updateDocHead(int doc) throws IOException {
-        setDoc(doc);
-        for (int i = 0; i < sortsIndex.length; i++) {
-          if (sortsIndex[i].docID() == doc) {
-            sortOrds[i] = sortsIndex[i].ordValue();
-            sortValues[i].copyBytes(sortsIndex[i].binaryValue());
-          } else {
-            sortOrds[i] = -1;
-            sortValues[i].clear();
-          }
-        }
-        this.doc = doc + readerContext.docBase;
-      }
-
-    }
-
-  }
-
-
-  // AbstractAllGroupHeadsCollector optimized for scores.
-  static class ScoreAllGroupHeadsCollector extends TermAllGroupHeadsCollector {
-
-    final SentinelIntSet ordSet;
-    final List<GroupHead> collectedGroups;
-    final SortField[] fields;
-
-    Scorer scorer;
-    GroupHead[] segmentGroupHeads;
-
-    ScoreAllGroupHeadsCollector(String groupField, Sort sortWithinGroup, int initialSize) {
-      super(groupField, sortWithinGroup.getSort().length);
-      ordSet = new SentinelIntSet(initialSize, -2);
-      collectedGroups = new ArrayList<>(initialSize);
-
-      final SortField[] sortFields = sortWithinGroup.getSort();
-      fields = new SortField[sortFields.length];
-      for (int i = 0; i < sortFields.length; i++) {
-        reversed[i] = sortFields[i].getReverse() ? -1 : 1;
-        fields[i] = sortFields[i];
-      }
-    }
-
-    @Override
-    protected Collection<GroupHead> getCollectedGroupHeads() {
-      return collectedGroups;
-    }
-
-    @Override
-    public boolean needsScores() {
-      return true;
-    }
-
-    @Override
-    public void setScorer(Scorer scorer) throws IOException {
-      this.scorer = scorer;
-    }
-
-    @Override
-    protected void retrieveGroupHeadAndAddIfNotExist(int doc) throws IOException {
-      if (doc > groupIndex.docID()) {
-        groupIndex.advance(doc);
-      }
-
-      int key;
-      if (doc == groupIndex.docID()) {
-        key = groupIndex.ordValue();
-      } else {
-        key = -1;
-      }
-        
-      GroupHead groupHead;
-      if (!ordSet.exists(key)) {
-        ordSet.put(key);
-        final BytesRef term;
-        if (key == -1) {
-          term = null;
-        } else {
-          term = BytesRef.deepCopyOf(groupIndex.lookupOrd(key));
-        }
-        groupHead = new GroupHead(doc, term);
-        collectedGroups.add(groupHead);
-        segmentGroupHeads[key+1] = groupHead;
-        temporalResult.stop = true;
-      } else {
-        temporalResult.stop = false;
-        groupHead = segmentGroupHeads[key+1];
-      }
-      temporalResult.groupHead = groupHead;
-    }
-
-    @Override
-    protected void doSetNextReader(LeafReaderContext context) throws IOException {
-      this.readerContext = context;
-      groupIndex = DocValues.getSorted(context.reader(), groupField);
-
-      // Clear ordSet and fill it with previously encountered groups that can occur in the current segment.
-      ordSet.clear();
-      segmentGroupHeads = new GroupHead[groupIndex.getValueCount()+1];
-      for (GroupHead collectedGroup : collectedGroups) {
-        int ord;
-        if (collectedGroup.groupValue == null) {
-          ord = -1;
-        } else {
-          ord = groupIndex.lookupTerm(collectedGroup.groupValue);
-        }
-        if (collectedGroup.groupValue == null || ord >= 0) {
-          ordSet.put(ord);
-          segmentGroupHeads[ord+1] = collectedGroup;
-        }
-      }
-    }
-
-    class GroupHead extends AllGroupHeadsCollector.GroupHead<BytesRef> {
-
-      float[] scores;
-
-      GroupHead(int doc, BytesRef groupValue) throws IOException {
-        super(groupValue, doc + readerContext.docBase);
-        scores = new float[fields.length];
-        float score = scorer.score();
-        for (int i = 0; i < scores.length; i++) {
-          scores[i] = score;
-        }
-      }
-
-      @Override
-      public int compare(int compIDX, int doc) throws IOException {
-        float score = scorer.score();
-        if (scores[compIDX] < score) {
-          return 1;
-        } else if (scores[compIDX] > score) {
-          return -1;
-        }
-        return 0;
-      }
-
-      @Override
-      public void updateDocHead(int doc) throws IOException {
-        float score = scorer.score();
-        for (int i = 0; i < scores.length; i++) {
-          scores[i] = score;
-        }
-        this.doc = doc + readerContext.docBase;
-      }
-
-    }
-
-  }
-
-}
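
For context, the factory removed here was driven roughly as follows. A hedged sketch against the pre-removal API, assuming an open IndexSearcher and the retrieveGroupHeads() method inherited from AllGroupHeadsCollector; the "author" field is an illustrative name:

  import java.io.IOException;

  import org.apache.lucene.search.IndexSearcher;
  import org.apache.lucene.search.MatchAllDocsQuery;
  import org.apache.lucene.search.Sort;
  import org.apache.lucene.search.grouping.AllGroupHeadsCollector;
  import org.apache.lucene.search.grouping.term.TermAllGroupHeadsCollector;
  import org.apache.lucene.util.BytesRef;

  class GroupHeadsExample {
    // Finds the most relevant document ("group head") per unique "author" value.
    static int[] groupHeads(IndexSearcher searcher) throws IOException {
      AllGroupHeadsCollector<BytesRef> collector =
          TermAllGroupHeadsCollector.create("author", Sort.RELEVANCE);
      searcher.search(new MatchAllDocsQuery(), collector);
      return collector.retrieveGroupHeads(); // one docID per collected group
    }
  }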

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermAllGroupsCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermAllGroupsCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermAllGroupsCollector.java
deleted file mode 100644
index 125555a..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermAllGroupsCollector.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search.grouping.term;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-import org.apache.lucene.index.DocValues;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.search.grouping.AllGroupsCollector;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.SentinelIntSet;
-
-/**
- * A collector that collects all groups that match the
- * query. Only the group value is collected, and the order
- * is undefined.  This collector does not determine
- * the most relevant document of a group.
- * <p>
- * Implementation detail: an int hash set (SentinelIntSet)
- * is used to detect if a group is already added to the
- * total count.  For each segment the int set is cleared and filled
- * with previous counted groups that occur in the new
- * segment.
- *
- * @lucene.experimental
- */
-public class TermAllGroupsCollector extends AllGroupsCollector<BytesRef> {
-
-  private static final int DEFAULT_INITIAL_SIZE = 128;
-
-  private final String groupField;
-  private final SentinelIntSet ordSet;
-  private final List<BytesRef> groups;
-
-  private SortedDocValues index;
-
-  /**
-   * Expert: Constructs an {@link AllGroupsCollector}
-   *
-   * @param groupField  The field to group by
-   * @param initialSize The initial allocation size of the
-   *                    internal int set and group list
-   *                    which should roughly match the total
-   *                    number of expected unique groups. Be aware that the
-   *                    heap usage is 4 bytes * initialSize.
-   */
-  public TermAllGroupsCollector(String groupField, int initialSize) {
-    ordSet = new SentinelIntSet(initialSize, -2);
-    groups = new ArrayList<>(initialSize);
-    this.groupField = groupField;
-  }
-
-  /**
-   * Constructs an {@link AllGroupsCollector}. This sets the
-   * initial allocation size for the internal int set and group
-   * list to 128.
-   *
-   * @param groupField The field to group by
-   */
-  public TermAllGroupsCollector(String groupField) {
-    this(groupField, DEFAULT_INITIAL_SIZE);
-  }
-
-  @Override
-  public void collect(int doc) throws IOException {
-    if (doc > index.docID()) {
-      index.advance(doc);
-    }
-    int key;
-    if (doc == index.docID()) {
-      key = index.ordValue();
-    } else {
-      key = -1;
-    }
-    if (!ordSet.exists(key)) {
-      ordSet.put(key);
-      final BytesRef term;
-      if (key == -1) {
-        term = null;
-      } else {
-        term = BytesRef.deepCopyOf(index.lookupOrd(key));
-      }
-      groups.add(term);
-    }
-  }
-
-  @Override
-  public Collection<BytesRef> getGroups() {
-    return groups;
-  }
-
-  @Override
-  protected void doSetNextReader(LeafReaderContext context) throws IOException {
-    index = DocValues.getSorted(context.reader(), groupField);
-
-    // Clear ordSet and fill it with previously encountered groups that can occur in the current segment.
-    ordSet.clear();
-    for (BytesRef countedGroup : groups) {
-      if (countedGroup == null) {
-        ordSet.put(-1);
-      } else {
-        int ord = index.lookupTerm(countedGroup);
-        if (ord >= 0) {
-          ordSet.put(ord);
-        }
-      }
-    }
-  }
-
-}
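
The deleted collector's contract is small enough to show whole. A sketch of counting unique group values with it, assuming an open IndexSearcher; "author" is an illustrative doc-values field:

  import java.io.IOException;
  import java.util.Collection;

  import org.apache.lucene.search.IndexSearcher;
  import org.apache.lucene.search.MatchAllDocsQuery;
  import org.apache.lucene.search.grouping.term.TermAllGroupsCollector;
  import org.apache.lucene.util.BytesRef;

  class AllGroupsExample {
    // Counts the unique "author" values among matching documents.
    static int countGroups(IndexSearcher searcher) throws IOException {
      TermAllGroupsCollector collector = new TermAllGroupsCollector("author");
      searcher.search(new MatchAllDocsQuery(), collector);
      Collection<BytesRef> groups = collector.getGroups(); // order undefined, may contain null
      return groups.size();
    }
  }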

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermDistinctValuesCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermDistinctValuesCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermDistinctValuesCollector.java
deleted file mode 100644
index e5356a3..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermDistinctValuesCollector.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search.grouping.term;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-
-import org.apache.lucene.index.DocValues;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.search.grouping.DistinctValuesCollector;
-import org.apache.lucene.search.grouping.SearchGroup;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.SentinelIntSet;
-
-/**
- * A term-based implementation of {@link DistinctValuesCollector} that relies
- * on {@link SortedDocValues} to count the distinct values per group.
- *
- * @lucene.experimental
- */
-public class TermDistinctValuesCollector extends DistinctValuesCollector<BytesRef> {
-
-  private final String groupField;
-  private final String countField;
-  private final List<TermGroupCount> groups;
-  private final SentinelIntSet ordSet;
-  private final TermGroupCount[] groupCounts;
-
-  private SortedDocValues groupFieldTermIndex;
-  private SortedDocValues countFieldTermIndex;
-
-  /**
-   * Constructs a {@link TermDistinctValuesCollector} instance.
-   *
-   * @param groupField The field to group by
-   * @param countField The field to count distinct values for
-   * @param groups The top N groups, collected during the first phase search
-   */
-  public TermDistinctValuesCollector(String groupField, String countField, Collection<SearchGroup<BytesRef>> groups) {
-    this.groupField = groupField;
-    this.countField = countField;
-    this.groups = new ArrayList<>(groups.size());
-    for (SearchGroup<BytesRef> group : groups) {
-      this.groups.add(new TermGroupCount(group.groupValue));
-    }
-    ordSet = new SentinelIntSet(groups.size(), -2);
-    groupCounts = new TermGroupCount[ordSet.keys.length];
-  }
-
-  @Override
-  public void collect(int doc) throws IOException {
-    if (doc > groupFieldTermIndex.docID()) {
-      groupFieldTermIndex.advance(doc);
-    }
-    int ord;
-    if (doc == groupFieldTermIndex.docID()) {
-      ord = groupFieldTermIndex.ordValue();
-    } else {
-      ord = -1;
-    }
-    int slot = ordSet.find(ord);
-    if (slot < 0) {
-      return;
-    }
-
-    TermGroupCount gc = groupCounts[slot];
-    if (doc > countFieldTermIndex.docID()) {
-      countFieldTermIndex.advance(doc);
-    }
-
-    int countOrd;
-    if (doc == countFieldTermIndex.docID()) {
-      countOrd = countFieldTermIndex.ordValue();
-    } else {
-      countOrd = -1;
-    }
-    
-    if (doesNotContainOrd(countOrd, gc.ords)) {
-      if (countOrd == -1) {
-        gc.uniqueValues.add(null);
-      } else {
-        BytesRef term = BytesRef.deepCopyOf(countFieldTermIndex.lookupOrd(countOrd));
-        gc.uniqueValues.add(term);
-      }
-
-      gc.ords = Arrays.copyOf(gc.ords, gc.ords.length + 1);
-      gc.ords[gc.ords.length - 1] = countOrd;
-      if (gc.ords.length > 1) {
-        Arrays.sort(gc.ords);
-      }
-    }
-  }
-
-  private boolean doesNotContainOrd(int ord, int[] ords) {
-    if (ords.length == 0) {
-      return true;
-    } else if (ords.length == 1) {
-      return ord != ords[0];
-    }
-    return Arrays.binarySearch(ords, ord) < 0;
-  }
-
-  @Override
-  public List<GroupCount<BytesRef>> getGroups() {
-    return new ArrayList<>(groups);
-  }
-
-  @Override
-  protected void doSetNextReader(LeafReaderContext context) throws IOException {
-    groupFieldTermIndex = DocValues.getSorted(context.reader(), groupField);
-    countFieldTermIndex = DocValues.getSorted(context.reader(), countField);
-    ordSet.clear();
-    for (TermGroupCount group : groups) {
-      int groupOrd = group.groupValue == null ? -1 : groupFieldTermIndex.lookupTerm(group.groupValue);
-      if (group.groupValue != null && groupOrd < 0) {
-        continue;
-      }
-
-      groupCounts[ordSet.put(groupOrd)] = group;
-      group.ords = new int[group.uniqueValues.size()];
-      Arrays.fill(group.ords, -2);
-      int i = 0;
-      for (BytesRef value : group.uniqueValues) {
-        int countOrd = value == null ? -1 : countFieldTermIndex.lookupTerm(value);
-        if (value == null || countOrd >= 0) {
-          group.ords[i++] = countOrd;
-        }
-      }
-    }
-  }
-
-  /** Holds distinct values for a single group.
-   *
-   * @lucene.experimental */
-  public static class TermGroupCount extends DistinctValuesCollector.GroupCount<BytesRef> {
-
-    int[] ords;
-
-    TermGroupCount(BytesRef groupValue) {
-      super(groupValue);
-    }
-  }
-
-}
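
This collector ran as the second phase of a grouping search: its constructor takes the SearchGroups produced by a first-pass collector (a matching first-pass sketch follows the next file). A hedged sketch; the "author" and "category" fields are illustrative, and the GroupCount fields are used as declared in the code above:

  import java.io.IOException;
  import java.util.Collection;

  import org.apache.lucene.search.IndexSearcher;
  import org.apache.lucene.search.Query;
  import org.apache.lucene.search.grouping.DistinctValuesCollector;
  import org.apache.lucene.search.grouping.SearchGroup;
  import org.apache.lucene.search.grouping.term.TermDistinctValuesCollector;
  import org.apache.lucene.util.BytesRef;

  class DistinctValuesExample {
    // Counts the distinct "category" values inside each of the given top groups.
    static void printDistinctCounts(IndexSearcher searcher, Query query,
        Collection<SearchGroup<BytesRef>> topGroups) throws IOException {
      TermDistinctValuesCollector collector =
          new TermDistinctValuesCollector("author", "category", topGroups);
      searcher.search(query, collector);
      for (DistinctValuesCollector.GroupCount<BytesRef> gc : collector.getGroups()) {
        System.out.println(gc.groupValue + " -> " + gc.uniqueValues.size() + " distinct");
      }
    }
  }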

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermFirstPassGroupingCollector.java
----------------------------------------------------------------------
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermFirstPassGroupingCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermFirstPassGroupingCollector.java
deleted file mode 100644
index 3c35fa8..0000000
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermFirstPassGroupingCollector.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.search.grouping.term;
-
-import java.io.IOException;
-
-import org.apache.lucene.index.DocValues;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.grouping.FirstPassGroupingCollector;
-import org.apache.lucene.util.ArrayUtil;
-import org.apache.lucene.util.BytesRef;
-
-/**
- * Concrete implementation of {@link FirstPassGroupingCollector} that groups based on
- * field values and more specifically uses {@link SortedDocValues}
- * to collect groups.
- *
- * @lucene.experimental
- */
-public class TermFirstPassGroupingCollector extends FirstPassGroupingCollector<BytesRef> {
-
-  private SortedDocValues index;
-
-  private String groupField;
-
-  /**
-   * Create the first pass collector.
-   *
-   *  @param groupField The field used to group
-   *    documents. This field must be single-valued and
-   *    indexed (DocValues is used to access its value
-   *    per-document).
-   *  @param groupSort The {@link Sort} used to sort the
-   *    groups.  The top sorted document within each group
-   *    according to groupSort, determines how that group
-   *    sorts against other groups.  This must be non-null,
-   *    ie, if you want to groupSort by relevance use
-   *    Sort.RELEVANCE.
-   *  @param topNGroups How many top groups to keep.
-   *  @throws IOException When I/O related errors occur
-   */
-  public TermFirstPassGroupingCollector(String groupField, Sort groupSort, int topNGroups) throws IOException {
-    super(groupSort, topNGroups);
-    this.groupField = groupField;
-  }
-
-  @Override
-  protected BytesRef getDocGroupValue(int doc) throws IOException {
-    if (doc > index.docID()) {
-      index.advance(doc);
-    }
-    if (doc == index.docID()) {
-      return index.binaryValue();
-    } else {
-      return null;
-    }
-  }
-
-  @Override
-  protected BytesRef copyDocGroupValue(BytesRef groupValue, BytesRef reuse) {
-    if (groupValue == null) {
-      return null;
-    } else if (reuse != null) {
-      reuse.bytes = ArrayUtil.grow(reuse.bytes, groupValue.length);
-      reuse.offset = 0;
-      reuse.length = groupValue.length;
-      System.arraycopy(groupValue.bytes, groupValue.offset, reuse.bytes, 0, groupValue.length);
-      return reuse;
-    } else {
-      return BytesRef.deepCopyOf(groupValue);
-    }
-  }
-
-  @Override
-  protected void doSetNextReader(LeafReaderContext readerContext) throws IOException {
-    super.doSetNextReader(readerContext);
-    index = DocValues.getSorted(readerContext.reader(), groupField);
-  }
-
-}
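
And the matching first phase, using the collector removed above. A minimal sketch assuming an open IndexSearcher and the getTopGroups(offset, fillFields) method inherited from FirstPassGroupingCollector; the "author" field is illustrative:

  import java.io.IOException;
  import java.util.Collection;

  import org.apache.lucene.search.IndexSearcher;
  import org.apache.lucene.search.Query;
  import org.apache.lucene.search.Sort;
  import org.apache.lucene.search.grouping.SearchGroup;
  import org.apache.lucene.search.grouping.term.TermFirstPassGroupingCollector;
  import org.apache.lucene.util.BytesRef;

  class FirstPassExample {
    // Collects the top 10 "author" groups by relevance; returns null if nothing matched.
    static Collection<SearchGroup<BytesRef>> topGroups(IndexSearcher searcher, Query query)
        throws IOException {
      TermFirstPassGroupingCollector first =
          new TermFirstPassGroupingCollector("author", Sort.RELEVANCE, 10);
      searcher.search(query, first);
      return first.getTopGroups(0, false);
    }
  }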


[09/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java
index dfb5a0f..56dab37 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java
@@ -29,10 +29,11 @@ import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.NodeConfig;
 import org.apache.solr.core.PluginInfo;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.core.SolrXmlConfig;
 import org.apache.solr.metrics.reporters.MockMetricReporter;
+import org.apache.solr.util.JmxUtil;
 import org.apache.solr.util.TestHarness;
 import org.junit.After;
 import org.junit.Before;
@@ -50,11 +51,12 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
   private static final String MULTIREGISTRY = "multiregistry";
   private static final String[] INITIAL_REPORTERS = {REPORTER_NAMES[0], REPORTER_NAMES[1], UNIVERSAL, SPECIFIC, MULTIGROUP, MULTIREGISTRY};
   private static final String[] RENAMED_REPORTERS = {REPORTER_NAMES[0], REPORTER_NAMES[1], UNIVERSAL, MULTIGROUP};
-  private static final SolrInfoMBean.Category HANDLER_CATEGORY = SolrInfoMBean.Category.QUERY;
+  private static final SolrInfoBean.Category HANDLER_CATEGORY = SolrInfoBean.Category.QUERY;
 
   private CoreContainer cc;
   private SolrMetricManager metricManager;
   private String tag;
+  private int jmxReporter;
 
   private void assertTagged(Map<String, SolrMetricReporter> reporters, String name) {
     assertTrue("Reporter '" + name + "' missing in " + reporters, reporters.containsKey(name + "@" + tag));
@@ -71,11 +73,12 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
     cc = createCoreContainer(cfg,
         new TestHarness.TestCoresLocator(DEFAULT_TEST_CORENAME, initCoreDataDir.getAbsolutePath(), "solrconfig.xml", "schema.xml"));
     h.coreName = DEFAULT_TEST_CORENAME;
+    jmxReporter = JmxUtil.findFirstMBeanServer() != null ? 1 : 0;
     metricManager = cc.getMetricManager();
     tag = h.getCore().getCoreMetricManager().getTag();
     // initially there are more reporters, because two of them are added via a matching collection name
     Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.core." + DEFAULT_TEST_CORENAME);
-    assertEquals(INITIAL_REPORTERS.length, reporters.size());
+    assertEquals(INITIAL_REPORTERS.length + jmxReporter, reporters.size());
     for (String r : INITIAL_REPORTERS) {
       assertTagged(reporters, r);
     }
@@ -85,9 +88,9 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
     cfg = cc.getConfig();
     PluginInfo[] plugins = cfg.getMetricReporterPlugins();
     assertNotNull(plugins);
-    assertEquals(10, plugins.length);
+    assertEquals(10 + jmxReporter, plugins.length);
     reporters = metricManager.getReporters("solr.node");
-    assertEquals(4, reporters.size());
+    assertEquals(4 + jmxReporter, reporters.size());
     assertTrue("Reporter '" + REPORTER_NAMES[0] + "' missing in solr.node", reporters.containsKey(REPORTER_NAMES[0]));
     assertTrue("Reporter '" + UNIVERSAL + "' missing in solr.node", reporters.containsKey(UNIVERSAL));
     assertTrue("Reporter '" + MULTIGROUP + "' missing in solr.node", reporters.containsKey(MULTIGROUP));
@@ -120,7 +123,7 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
 
     String metricName = SolrMetricManager.mkName(METRIC_NAME, HANDLER_CATEGORY.toString(), HANDLER_NAME);
     SolrCoreMetricManager coreMetricManager = h.getCore().getCoreMetricManager();
-    Timer timer = (Timer) metricManager.timer(coreMetricManager.getRegistryName(), metricName);
+    Timer timer = (Timer) metricManager.timer(null, coreMetricManager.getRegistryName(), metricName);
 
     long initialCount = timer.getCount();
 
@@ -132,7 +135,7 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
     long finalCount = timer.getCount();
     assertEquals("metric counter incorrect", iterations, finalCount - initialCount);
     Map<String, SolrMetricReporter> reporters = metricManager.getReporters(coreMetricManager.getRegistryName());
-    assertEquals(RENAMED_REPORTERS.length, reporters.size());
+    assertEquals(RENAMED_REPORTERS.length + jmxReporter, reporters.size());
 
     // SPECIFIC and MULTIREGISTRY were skipped because they were
     // specific to collection1

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGangliaReporterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGangliaReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGangliaReporterTest.java
index c50ff3c..eca414c 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGangliaReporterTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGangliaReporterTest.java
@@ -64,7 +64,7 @@ public class SolrGangliaReporterTest extends SolrTestCaseJ4 {
     h.coreName = DEFAULT_TEST_CORENAME;
     SolrMetricManager metricManager = cc.getMetricManager();
     Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.node");
-    assertEquals(1, reporters.size());
+    assertTrue(reporters.toString(), reporters.size() >= 1);
     SolrMetricReporter reporter = reporters.get("test");
     assertNotNull(reporter);
     assertTrue(reporter instanceof SolrGangliaReporter);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGraphiteReporterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGraphiteReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGraphiteReporterTest.java
index f45b193..5438504 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGraphiteReporterTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGraphiteReporterTest.java
@@ -35,6 +35,7 @@ import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.core.SolrXmlConfig;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricReporter;
+import org.apache.solr.util.JmxUtil;
 import org.apache.solr.util.TestHarness;
 import org.junit.Test;
 
@@ -45,6 +46,7 @@ public class SolrGraphiteReporterTest extends SolrTestCaseJ4 {
 
   @Test
   public void testReporter() throws Exception {
+    int jmxReporter = JmxUtil.findFirstMBeanServer() != null ? 1 : 0;
     Path home = Paths.get(TEST_HOME());
     // define these properties, they are used in solrconfig.xml
     System.setProperty("solr.test.sys.prop1", "propone");
@@ -63,7 +65,7 @@ public class SolrGraphiteReporterTest extends SolrTestCaseJ4 {
       h.coreName = DEFAULT_TEST_CORENAME;
       SolrMetricManager metricManager = cc.getMetricManager();
       Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.node");
-      assertEquals(1, reporters.size());
+      assertEquals(1 + jmxReporter, reporters.size());
       SolrMetricReporter reporter = reporters.get("test");
       assertNotNull(reporter);
       assertTrue(reporter instanceof SolrGraphiteReporter);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
index 82b9d58..b10799e 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
@@ -20,6 +20,7 @@ import javax.management.MBeanServer;
 import javax.management.ObjectInstance;
 import javax.management.ObjectName;
 
+import java.rmi.registry.LocateRegistry;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
@@ -31,7 +32,7 @@ import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrCoreMetricManager;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
@@ -40,12 +41,15 @@ import org.apache.solr.metrics.SolrMetricTestUtils;
 import org.apache.solr.schema.FieldType;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class SolrJmxReporterTest extends SolrTestCaseJ4 {
 
   private static final int MAX_ITERATIONS = 20;
 
+  private static int jmxPort;
+
   private String domain;
 
   private SolrCoreMetricManager coreMetricManager;
@@ -53,6 +57,14 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
   private SolrJmxReporter reporter;
   private MBeanServer mBeanServer;
   private String reporterName;
+  private String rootName;
+
+  @BeforeClass
+  public static void init() throws Exception {
+    jmxPort = getNextAvailablePort();
+    assertFalse(jmxPort == -1);
+    LocateRegistry.createRegistry(jmxPort);
+  }
 
   @Before
   public void beforeTest() throws Exception {
@@ -60,10 +72,11 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
 
     final SolrCore core = h.getCore();
     domain = core.getName();
+    rootName = TestUtil.randomSimpleString(random(), 1, 10);
 
     coreMetricManager = core.getCoreMetricManager();
-    metricManager = core.getCoreDescriptor().getCoreContainer().getMetricManager();
-    PluginInfo pluginInfo = createReporterPluginInfo();
+    metricManager = core.getCoreContainer().getMetricManager();
+    PluginInfo pluginInfo = createReporterPluginInfo(rootName, true);
     metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(),
         pluginInfo, coreMetricManager.getTag());
 
@@ -79,7 +92,7 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
     assertNotNull("MBean server not found.", mBeanServer);
   }
 
-  private PluginInfo createReporterPluginInfo() {
+  private PluginInfo createReporterPluginInfo(String rootName, boolean enabled) {
     Random random = random();
     String className = SolrJmxReporter.class.getName();
     String reporterName = TestUtil.randomSimpleString(random, 1, 10);
@@ -87,6 +100,9 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
     Map<String, Object> attrs = new HashMap<>();
     attrs.put(FieldType.CLASS_NAME, className);
     attrs.put(CoreAdminParams.NAME, reporterName);
+    attrs.put("rootName", rootName);
+    attrs.put("enabled", enabled);
+    attrs.put("serviceUrl", "service:jmx:rmi:///jndi/rmi://localhost:" + jmxPort + "/solrjmx");
 
     boolean shouldOverrideDomain = random.nextBoolean();
     if (shouldOverrideDomain) {
@@ -114,7 +130,7 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
 
     Map<String, Counter> registered = new HashMap<>();
     String scope = SolrMetricTestUtils.getRandomScope(random, true);
-    SolrInfoMBean.Category category = SolrMetricTestUtils.getRandomCategory(random, true);
+    SolrInfoBean.Category category = SolrMetricTestUtils.getRandomCategory(random, true);
 
     int iterations = TestUtil.nextInt(random, 0, MAX_ITERATIONS);
     for (int i = 0; i < iterations; ++i) {
@@ -126,7 +142,7 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
       Set<ObjectInstance> objects = mBeanServer.queryMBeans(null, null);
       assertEquals(registered.size(), objects.stream().
           filter(o -> scope.equals(o.getObjectName().getKeyProperty("scope")) &&
-                      reporterName.equals(o.getObjectName().getKeyProperty("reporter"))).count());
+                      rootName.equals(o.getObjectName().getDomain())).count());
     }
   }
 
@@ -135,17 +151,17 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
     Random random = random();
 
     String scope = SolrMetricTestUtils.getRandomScope(random, true);
-    SolrInfoMBean.Category category = SolrMetricTestUtils.getRandomCategory(random, true);
+    SolrInfoBean.Category category = SolrMetricTestUtils.getRandomCategory(random, true);
     Map<String, Counter> metrics = SolrMetricTestUtils.getRandomMetrics(random, true);
     SolrMetricProducer producer = SolrMetricTestUtils.getProducerOf(metricManager, category, scope, metrics);
     coreMetricManager.registerMetricProducer(scope, producer);
     Set<ObjectInstance> objects = mBeanServer.queryMBeans(null, null);
     assertEquals(metrics.size(), objects.stream().
         filter(o -> scope.equals(o.getObjectName().getKeyProperty("scope")) &&
-            reporterName.equals(o.getObjectName().getKeyProperty("reporter"))).count());
+        o.getObjectName().getDomain().equals(rootName)).count());
 
     h.getCoreContainer().reload(h.getCore().getName());
-    PluginInfo pluginInfo = createReporterPluginInfo();
+    PluginInfo pluginInfo = createReporterPluginInfo(rootName, true);
     metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(),
         pluginInfo, String.valueOf(coreMetricManager.getCore().hashCode()));
     coreMetricManager.registerMetricProducer(scope, producer);
@@ -153,7 +169,38 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
     objects = mBeanServer.queryMBeans(null, null);
     assertEquals(metrics.size(), objects.stream().
         filter(o -> scope.equals(o.getObjectName().getKeyProperty("scope")) &&
-            pluginInfo.name.equals(o.getObjectName().getKeyProperty("reporter"))).count());
+            rootName.equals(o.getObjectName().getDomain())).count());
+  }
+
+  @Test
+  public void testEnabled() throws Exception {
+    String root1 = TestUtil.randomSimpleString(random(), 1, 10);
+    PluginInfo pluginInfo1 = createReporterPluginInfo(root1, true);
+    metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(),
+        pluginInfo1, coreMetricManager.getTag());
+
+    String root2 = TestUtil.randomSimpleString(random(), 1, 10);
+    assertFalse(root2.equals(root1));
+    PluginInfo pluginInfo2 = createReporterPluginInfo(root2, false);
+    metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(),
+        pluginInfo2, coreMetricManager.getTag());
+
+    Map<String, SolrMetricReporter> reporters = metricManager.getReporters(coreMetricManager.getRegistryName());
+    assertTrue(reporters.containsKey(pluginInfo1.name + "@" + coreMetricManager.getTag()));
+    assertTrue(reporters.containsKey(pluginInfo2.name + "@" + coreMetricManager.getTag()));
+
+    String scope = SolrMetricTestUtils.getRandomScope(random(), true);
+    SolrInfoBean.Category category = SolrMetricTestUtils.getRandomCategory(random(), true);
+    Map<String, Counter> metrics = SolrMetricTestUtils.getRandomMetrics(random(), true);
+    SolrMetricProducer producer = SolrMetricTestUtils.getProducerOf(metricManager, category, scope, metrics);
+    coreMetricManager.registerMetricProducer(scope, producer);
+    Set<ObjectInstance> objects = mBeanServer.queryMBeans(null, null);
+    assertEquals(metrics.size(), objects.stream().
+        filter(o -> scope.equals(o.getObjectName().getKeyProperty("scope")) &&
+            root1.equals(o.getObjectName().getDomain())).count());
+    assertEquals(0, objects.stream().
+        filter(o -> scope.equals(o.getObjectName().getKeyProperty("scope")) &&
+            root2.equals(o.getObjectName().getDomain())).count());
   }
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java
index 47bf8e7..a8f3343 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java
@@ -57,7 +57,7 @@ public class SolrSlf4jReporterTest extends SolrTestCaseJ4 {
     h.coreName = DEFAULT_TEST_CORENAME;
     SolrMetricManager metricManager = cc.getMetricManager();
     Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.node");
-    assertEquals(2, reporters.size());
+    assertTrue(reporters.toString(), reporters.size() >= 2);
     SolrMetricReporter reporter = reporters.get("test1");
     assertNotNull(reporter);
     assertTrue(reporter instanceof SolrSlf4jReporter);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
index 91952b8..f527a17 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
@@ -28,6 +28,7 @@ import org.apache.solr.core.SolrCore;
 import org.apache.solr.metrics.AggregateMetric;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricReporter;
+import org.apache.solr.util.JmxUtil;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -38,11 +39,13 @@ import org.junit.Test;
 public class SolrCloudReportersTest extends SolrCloudTestCase {
   int leaderRegistries;
   int clusterRegistries;
+  static int jmxReporter;
 
 
   @BeforeClass
   public static void configureDummyCluster() throws Exception {
     configureCluster(0).configure();
+    jmxReporter = JmxUtil.findFirstMBeanServer() != null ? 1 : 0;
   }
 
   @Before
@@ -67,7 +70,7 @@ public class SolrCloudReportersTest extends SolrCloudTestCase {
     cluster.getJettySolrRunners().forEach(jetty -> {
       CoreContainer cc = jetty.getCoreContainer();
       // verify registry names
-      for (String name : cc.getCoreNames()) {
+      for (String name : cc.getLoadedCoreNames()) {
         SolrCore core = cc.getCore(name);
         try {
           String registryName = core.getCoreMetricManager().getRegistryName();
@@ -97,7 +100,7 @@ public class SolrCloudReportersTest extends SolrCloudTestCase {
       assertEquals(5, sor.getPeriod());
       for (String registryName : metricManager.registryNames(".*\\.shard[0-9]\\.replica.*")) {
         reporters = metricManager.getReporters(registryName);
-        assertEquals(reporters.toString(), 1, reporters.size());
+        assertEquals(reporters.toString(), 1 + jmxReporter, reporters.size());
         reporter = null;
         for (String name : reporters.keySet()) {
           if (name.startsWith("test")) {
@@ -156,7 +159,7 @@ public class SolrCloudReportersTest extends SolrCloudTestCase {
       assertEquals(reporters.toString(), 0, reporters.size());
       for (String registryName : metricManager.registryNames(".*\\.shard[0-9]\\.replica.*")) {
         reporters = metricManager.getReporters(registryName);
-        assertEquals(reporters.toString(), 0, reporters.size());
+        assertEquals(reporters.toString(), 0 + jmxReporter, reporters.size());
       }
     });
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java
index 9ce3762..51c2a5d 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java
@@ -63,7 +63,7 @@ public class SolrShardReporterTest extends AbstractFullDistribZkTestBase {
     for (JettySolrRunner jetty : jettys) {
       CoreContainer cc = jetty.getCoreContainer();
       SolrMetricManager metricManager = cc.getMetricManager();
-      for (final String coreName : cc.getCoreNames()) {
+      for (final String coreName : cc.getLoadedCoreNames()) {
         CoreDescriptor cd = cc.getCoreDescriptor(coreName);
         if (cd.getCloudDescriptor() == null) { // not a cloud collection
           continue;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/rest/schema/TestFieldResource.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldResource.java
index d591b9a..4f53609 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldResource.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldResource.java
@@ -23,7 +23,7 @@ public class TestFieldResource extends SolrRestletTestBase {
   public void testGetField() throws Exception {
     assertQ("/schema/fields/test_postv?indent=on&wt=xml&showDefaults=true",
             "count(/response/lst[@name='field']) = 1",
-            "count(/response/lst[@name='field']/*) = 17",
+            "count(/response/lst[@name='field']/*) = 18",
             "/response/lst[@name='field']/str[@name='name'] = 'test_postv'",
             "/response/lst[@name='field']/str[@name='type'] = 'text'",
             "/response/lst[@name='field']/bool[@name='indexed'] = 'true'",
@@ -38,6 +38,7 @@ public class TestFieldResource extends SolrRestletTestBase {
             "/response/lst[@name='field']/bool[@name='omitPositions'] = 'false'",
             "/response/lst[@name='field']/bool[@name='storeOffsetsWithPositions'] = 'false'",
             "/response/lst[@name='field']/bool[@name='multiValued'] = 'false'",
+            "/response/lst[@name='field']/bool[@name='large'] = 'false'",
             "/response/lst[@name='field']/bool[@name='required'] = 'false'",
             "/response/lst[@name='field']/bool[@name='tokenized'] = 'true'",
             "/response/lst[@name='field']/bool[@name='useDocValuesAsStored'] = 'true'");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java
index 26fcde1..5cacc48 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.rest.schema.analysis;
 import java.io.File;
+import java.net.URLEncoder;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -154,13 +155,30 @@ public class TestManagedSynonymFilterFactory extends RestTestBase {
             "count(/response/lst[@name='field']) = 1",
             "/response/lst[@name='responseHeader']/int[@name='status'] = '0'");
 
+    // multi-term synonym logic - SOLR-10264
+    final String multiTermOrigin;
+    final String multiTermSynonym;
+    if (random().nextBoolean()) {
+      multiTermOrigin  = "hansestadt hamburg";
+      multiTermSynonym = "hh";
+    } else {
+      multiTermOrigin  = "hh";
+      multiTermSynonym = "hansestadt hamburg";
+    }
+    // multi-term logic similar to the angry/mad logic (angry ~ origin, mad ~ synonym)
+
     assertU(adoc(newFieldName, "I am a happy test today but yesterday I was angry", "id", "5150"));
+    assertU(adoc(newFieldName, multiTermOrigin+" is in North Germany.", "id", "040"));
     assertU(commit());
 
     assertQ("/select?q=" + newFieldName + ":angry",
             "/response/lst[@name='responseHeader']/int[@name='status'] = '0'",
             "/response/result[@name='response'][@numFound='1']",
             "/response/result[@name='response']/doc/str[@name='id'][.='5150']");    
+    assertQ("/select?q=" + newFieldName + ":"+URLEncoder.encode(multiTermOrigin, "UTF-8"),
+        "/response/lst[@name='responseHeader']/int[@name='status'] = '0'",
+        "/response/result[@name='response'][@numFound='1']",
+        "/response/result[@name='response']/doc/str[@name='id'][.='040']");
     
     // add a mapping that will expand a query for "mad" to match docs with "angry"
     syns = new HashMap<>();
@@ -172,12 +190,28 @@ public class TestManagedSynonymFilterFactory extends RestTestBase {
     assertJQ(endpoint, 
         "/synonymMappings/managedMap/mad==['angry']");
 
+    // add a mapping that will expand a query for the multi-term synonym to match docs with the multi-term origin
+    syns = new HashMap<>();
+    syns.put(multiTermSynonym, Arrays.asList(multiTermOrigin));
+    assertJPut(endpoint,
+               JSONUtil.toJSON(syns),
+               "/responseHeader/status==0");
+
+    assertJQ(endpoint+"/"+URLEncoder.encode(multiTermSynonym, "UTF-8"),
+        "/"+multiTermSynonym+"==['"+multiTermOrigin+"']");
+
     // should not match as the synonym mapping between mad and angry does not    
     // get applied until core reload
     assertQ("/select?q=" + newFieldName + ":mad",
         "/response/lst[@name='responseHeader']/int[@name='status'] = '0'",
         "/response/result[@name='response'][@numFound='0']");    
     
+    // should not match as the multi-term synonym mapping does not
+    // get applied until core reload
+    assertQ("/select?q=" + newFieldName + ":("+URLEncoder.encode(multiTermSynonym, "UTF-8") + ")&sow=false",
+        "/response/lst[@name='responseHeader']/int[@name='status'] = '0'",
+        "/response/result[@name='response'][@numFound='0']");
+
     restTestHarness.reload();
 
     // now query for mad and we should see our test doc
@@ -186,6 +220,12 @@ public class TestManagedSynonymFilterFactory extends RestTestBase {
         "/response/result[@name='response'][@numFound='1']",
         "/response/result[@name='response']/doc/str[@name='id'][.='5150']");    
     
+    // now query for "synonym" and we should see our test doc with "origin"
+    assertQ("/select?q=" + newFieldName + ":("+URLEncoder.encode(multiTermSynonym, "UTF-8") + ")&sow=false",
+        "/response/lst[@name='responseHeader']/int[@name='status'] = '0'",
+        "/response/result[@name='response'][@numFound='1']",
+        "/response/result[@name='response']/doc/str[@name='id'][.='040']");
+
     // test for SOLR-6015
     syns = new HashMap<>();
     syns.put("mb", Arrays.asList("megabyte"));    

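For readers skimming the diff: the multi-term synonym queries above all pass sow=false (split-on-whitespace off), which is what lets a multi-word unit like "hansestadt hamburg" reach the analysis chain intact. A hedged SolrJ sketch of the same query shape (the field name is illustrative, not from the patch):

    import org.apache.solr.client.solrj.SolrQuery;

    SolrQuery q = new SolrQuery("managed_en:(\"hansestadt hamburg\")"); // hypothetical field
    q.set("sow", "false"); // keep the multi-word unit intact so the managed synonym can match
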
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
index cf43a68..2d8afee 100644
--- a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
@@ -25,10 +25,14 @@ import java.util.List;
 import java.util.function.Function;
 import java.util.function.Supplier;
 
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.queries.function.FunctionValues;
@@ -151,6 +155,74 @@ public class DocValuesTest extends SolrTestCaseJ4 {
     }
   }
 
+
+  public void testHalfAndHalfDocValues() throws Exception {
+    // Insert two docs without docvalues
+    String fieldname = "string_add_dv_later";
+    assertU(adoc("id", "3", fieldname, "c"));
+    assertU(commit());
+    assertU(adoc("id", "1", fieldname, "a"));
+    assertU(commit());
+
+
+    try (SolrCore core = h.getCoreInc()) {
+      assertFalse(core.getLatestSchema().getField(fieldname).hasDocValues());
+      // Add docvalues to the field type
+      IndexSchema schema = core.getLatestSchema();
+      SchemaField oldField = schema.getField(fieldname);
+      int newProperties = oldField.getProperties() | SchemaField.DOC_VALUES;
+      
+      SchemaField sf = new SchemaField( fieldname, oldField.getType(), newProperties, null);
+      schema.getFields().put( fieldname, sf );
+      
+      // Insert a new doc with docvalues
+      assertU(adoc("id", "2", fieldname, "b"));
+      assertU(commit());
+    
+    
+      // Check there are a mix of segments with and without docvalues
+      final RefCounted<SolrIndexSearcher> searcherRef = core.openNewSearcher(true, true);
+      final SolrIndexSearcher searcher = searcherRef.get();
+      try {
+        final DirectoryReader topReader = searcher.getRawReader();
+
+        //Assert no merges
+        
+        assertEquals(3, topReader.numDocs());
+        assertEquals(3, topReader.leaves().size());
+        
+        final FieldInfos infos = MultiFields.getMergedFieldInfos(topReader);
+        //The global field type should have docValues because a document with dvs was added
+        assertEquals(DocValuesType.SORTED, infos.fieldInfo(fieldname).getDocValuesType());
+        
+        for(LeafReaderContext ctx: topReader.leaves()) {
+          LeafReader r = ctx.reader();
+          //Make sure there were no merges
+          assertEquals(1, r.numDocs());
+          Document doc = r.document(0);
+          String id = doc.getField("id").stringValue();
+          
+          if(id.equals("1") || id.equals("3")) {
+            assertEquals(DocValuesType.NONE, r.getFieldInfos().fieldInfo(fieldname).getDocValuesType());
+          } else {
+            assertEquals(DocValuesType.SORTED, r.getFieldInfos().fieldInfo(fieldname).getDocValuesType());
+          }
+          
+        }
+      } finally {
+        searcherRef.decref();
+      }
+    }
+    
+    // Assert sort order is correct
+    assertQ(req("q", "string_add_dv_later:*", "sort", "string_add_dv_later asc"),
+        "//*[@numFound='3']",
+        "//result/doc[1]/int[@name='id'][.=1]",
+        "//result/doc[2]/int[@name='id'][.=2]",
+        "//result/doc[3]/int[@name='id'][.=3]"
+    );
+  }
+
   private void tstToObj(SchemaField sf, Object o) {
     List<IndexableField> fields = sf.createFields(o);
     for (IndexableField field : fields) {


[06/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPScore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPScore.java b/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPScore.java
index 17abf78..b9a2e78 100644
--- a/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPScore.java
+++ b/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPScore.java
@@ -21,14 +21,16 @@ import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Locale;
+import java.util.Map;
 import java.util.Random;
 
+import com.codahale.metrics.Metric;
 import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.NamedList;
+import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestInfo;
 import org.apache.solr.response.SolrQueryResponse;
@@ -198,22 +200,23 @@ public class TestScoreJoinQPScore extends SolrTestCaseJ4 {
   public void testCacheHit() throws Exception {
     indexDataForScorring();
 
-    SolrCache cache = (SolrCache) h.getCore().getInfoRegistry()
-        .get("queryResultCache");
+    Map<String, Metric> metrics = h.getCoreContainer().getMetricManager().registry(h.getCore().getCoreMetricManager().getRegistryName()).getMetrics();
+
+    MetricsMap mm = (MetricsMap)metrics.get("CACHE.searcher.queryResultCache");
     {
-      final NamedList statPre = cache.getStatistics();
+      Map<String,Object> statPre = mm.getValue();
       h.query(req("q", "{!join from=movieId_s to=id score=Avg}title:first", "fl", "id", "omitHeader", "true"));
-      assertHitOrInsert(cache, statPre);
+      assertHitOrInsert(mm.getValue(), statPre);
     }
 
     {
-      final NamedList statPre = cache.getStatistics();
+      Map<String,Object> statPre = mm.getValue();
       h.query(req("q", "{!join from=movieId_s to=id score=Avg}title:first", "fl", "id", "omitHeader", "true"));
-      assertHit(cache, statPre);
+      assertHit(mm.getValue(), statPre);
     }
 
     {
-      NamedList statPre = cache.getStatistics();
+      Map<String,Object> statPre = mm.getValue();
 
       Random r = random();
       boolean changed = false;
@@ -234,14 +237,14 @@ public class TestScoreJoinQPScore extends SolrTestCaseJ4 {
               //" b=" + boost + 
               "}" + q, "fl", "id", "omitHeader", "true")
       );
-      assertInsert(cache, statPre);
+      assertInsert(mm.getValue(), statPre);
 
-      statPre = cache.getStatistics();
+      statPre = mm.getValue();
       final String repeat = h.query(req("q", "{!join from=" + from + " to=" + to + " score=" + score.toLowerCase(Locale.ROOT) +
           //" b=" + boost
               "}" + q, "fl", "id", "omitHeader", "true")
       );
-      assertHit(cache, statPre);
+      assertHit(mm.getValue(), statPre);
 
       assertEquals("lowercase shouldn't change anything", resp, repeat);
 
@@ -254,6 +257,7 @@ public class TestScoreJoinQPScore extends SolrTestCaseJ4 {
     // these queries do not overlap with others in this test case.
     // however, it might be better to extract this method into a separate suite;
     // for a while, let's nuke the cache content in case of repetitions
+    SolrCache cache = (SolrCache)h.getCore().getInfoRegistry().get("queryResultCache");
     cache.clear();
   }
 
@@ -264,32 +268,32 @@ public class TestScoreJoinQPScore extends SolrTestCaseJ4 {
     return l.get(r.nextInt(l.size()));
   }
 
-  private void assertInsert(SolrCache cache, final NamedList statPre) {
+  private void assertInsert(Map<String,Object> current, final Map<String,Object> statPre) {
     assertEquals("it lookups", 1,
-        delta("lookups", cache.getStatistics(), statPre));
-    assertEquals("it doesn't hit", 0, delta("hits", cache.getStatistics(), statPre));
+        delta("lookups", current, statPre));
+    assertEquals("it doesn't hit", 0, delta("hits", current, statPre));
     assertEquals("it inserts", 1,
-        delta("inserts", cache.getStatistics(), statPre));
+        delta("inserts", current, statPre));
   }
 
-  private void assertHit(SolrCache cache, final NamedList statPre) {
+  private void assertHit(Map<String,Object> current, final Map<String,Object> statPre) {
     assertEquals("it lookups", 1,
-        delta("lookups", cache.getStatistics(), statPre));
-    assertEquals("it hits", 1, delta("hits", cache.getStatistics(), statPre));
+        delta("lookups", current, statPre));
+    assertEquals("it hits", 1, delta("hits", current, statPre));
     assertEquals("it doesn't insert", 0,
-        delta("inserts", cache.getStatistics(), statPre));
+        delta("inserts", current, statPre));
   }
 
-  private void assertHitOrInsert(SolrCache cache, final NamedList statPre) {
+  private void assertHitOrInsert(Map<String,Object> current, final Map<String,Object> statPre) {
     assertEquals("it lookups", 1,
-        delta("lookups", cache.getStatistics(), statPre));
-    final long mayHit = delta("hits", cache.getStatistics(), statPre);
+        delta("lookups", current, statPre));
+    final long mayHit = delta("hits", current, statPre);
     assertTrue("it may hit", 0 == mayHit || 1 == mayHit);
     assertEquals("or insert on cold", 1,
-        delta("inserts", cache.getStatistics(), statPre) + mayHit);
+        delta("inserts", current, statPre) + mayHit);
   }
 
-  private long delta(String key, NamedList a, NamedList b) {
+  private long delta(String key, Map<String,Object> a, Map<String,Object> b) {
     return (Long) a.get(key) - (Long) b.get(key);
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorWithCollapseTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorWithCollapseTest.java b/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorWithCollapseTest.java
new file mode 100644
index 0000000..f985b41
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorWithCollapseTest.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.spelling;
+
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.SpellingParams;
+import org.apache.solr.handler.component.SpellCheckComponent;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class SpellCheckCollatorWithCollapseTest  extends SolrTestCaseJ4 {
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore("solrconfig-collapseqparser.xml", "schema11.xml");
+  }
+
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+    clearIndex();
+    assertU(commit());
+  }
+  
+  @Test
+  public void test() throws Exception {
+    for(int i=0 ; i<200 ; i++) {
+      String[] doc = {"id","" + i, "group_i", "" + (i % 10), "a_s", ((i%2)==0 ? "love" : "peace")};
+      assertU(adoc(doc));
+      if(i%5==0) {
+        assertU(commit());
+      }
+    }
+    assertU(commit());
+    assertQ(
+        req(
+            SpellCheckComponent.COMPONENT_NAME, "true",
+            SpellCheckComponent.SPELLCHECK_DICT, "direct",
+            SpellingParams.SPELLCHECK_COUNT, "10",
+            SpellingParams.SPELLCHECK_COLLATE, "true",
+            SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES, "5",
+            SpellingParams.SPELLCHECK_MAX_COLLATIONS, "1",
+            CommonParams.Q, "a_s:lpve",
+            CommonParams.QT, "spellCheckCompRH_Direct",
+            SpellingParams.SPELLCHECK_COLLATE_MAX_COLLECT_DOCS, "5",
+            CommonParams.FQ, "{!collapse field=group_i}",
+            "expand", "true"),
+        "//lst[@name='spellcheck']/lst[@name='collations']/str[@name='collation']='a_s:love'");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java b/solr/core/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java
index ed0749f..821fe73 100644
--- a/solr/core/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java
+++ b/solr/core/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java
@@ -94,6 +94,16 @@ public class SpellingQueryConverterTest extends LuceneTestCase {
     assertTrue("tokens is null and it shouldn't be", tokens != null);
     assertEquals("tokens Size: " + tokens.size() + " is not 1", 1, tokens.size());
     assertTrue("Token offsets do not match", isOffsetCorrect(original, tokens));
+    
+    String firstKeyword = "value1";
+    String secondKeyword = "value2";
+    original = "field-with-parenthesis:(" + firstKeyword + " " + secondKeyword + ")";
+    tokens = converter.convert(original);
+    assertTrue("tokens is null and it shouldn't be", tokens != null);
+    assertEquals("tokens Size: " + tokens.size() + " is not 2", 2, tokens.size());
+    assertTrue("Token offsets do not match", isOffsetCorrect(original, tokens));
+    assertTrue("first Token is not " + firstKeyword, new ArrayList<>(tokens).get(0).toString().equals(firstKeyword));
+    assertTrue("second Token is not " + secondKeyword, new ArrayList<>(tokens).get(1).toString().equals(secondKeyword));    
   }
 
   private boolean isOffsetCorrect(String s, Collection<Token> tokens) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/store/blockcache/BufferStoreTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/store/blockcache/BufferStoreTest.java b/solr/core/src/test/org/apache/solr/store/blockcache/BufferStoreTest.java
index e91d762..534793f 100644
--- a/solr/core/src/test/org/apache/solr/store/blockcache/BufferStoreTest.java
+++ b/solr/core/src/test/org/apache/solr/store/blockcache/BufferStoreTest.java
@@ -17,9 +17,12 @@
 package org.apache.solr.store.blockcache;
 
 import java.math.BigDecimal;
+import java.util.Map;
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.solr.common.util.NamedList;
+import org.apache.lucene.util.TestUtil;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -27,12 +30,18 @@ public class BufferStoreTest extends LuceneTestCase {
   private final static int blockSize = 1024;
 
   private Metrics metrics;
+  private MetricsMap metricsMap;
 
   private Store store;
 
   @Before
   public void setup() {
     metrics = new Metrics();
+    SolrMetricManager metricManager = new SolrMetricManager();
+    String registry = TestUtil.randomSimpleString(random(), 2, 10);
+    String scope = TestUtil.randomSimpleString(random(), 2, 10);
+    metrics.initializeMetrics(metricManager, registry, scope);
+    metricsMap = (MetricsMap) metricManager.registry(registry).getMetrics().get("CACHE." + scope + ".hdfsBlockCache");
     BufferStore.initNewBuffer(blockSize, blockSize, metrics);
     store = BufferStore.instance(blockSize);
   }
@@ -77,7 +86,7 @@ public class BufferStoreTest extends LuceneTestCase {
    *          whether buffers should have been lost since the last call
    */
   private void assertGaugeMetricsChanged(boolean allocated, boolean lost) {
-    NamedList<Number> stats = metrics.getStatistics();
+    Map<String,Object> stats = metricsMap.getValue();
 
     assertEquals("Buffer allocation metric not updating correctly.",
         allocated, isMetricPositive(stats, "buffercache.allocations"));
@@ -85,7 +94,7 @@ public class BufferStoreTest extends LuceneTestCase {
         lost, isMetricPositive(stats, "buffercache.lost"));
   }
 
-  private boolean isMetricPositive(NamedList<Number> stats, String metric) {
+  private boolean isMetricPositive(Map<String,Object> stats, String metric) {
     return new BigDecimal(stats.get(metric).toString()).compareTo(BigDecimal.ZERO) > 0;
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/update/VersionInfoTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/VersionInfoTest.java b/solr/core/src/test/org/apache/solr/update/VersionInfoTest.java
index 6bccd95..e8a85bd 100644
--- a/solr/core/src/test/org/apache/solr/update/VersionInfoTest.java
+++ b/solr/core/src/test/org/apache/solr/update/VersionInfoTest.java
@@ -101,7 +101,7 @@ public class VersionInfoTest extends SolrTestCaseJ4 {
         versionFromTLog, versionFromIndex);
 
     // reload the core, which should reset the max
-    CoreContainer coreContainer = req.getCore().getCoreDescriptor().getCoreContainer();
+    CoreContainer coreContainer = req.getCore().getCoreContainer();
     coreContainer.reload(req.getCore().getName());
     maxVersionFromIndex = ulog.getMaxVersionFromIndex();
     assertEquals("max version from index should be equal to version of last doc added after reload",

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorTest.java
index 432bb02..aa536a7 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorTest.java
@@ -99,7 +99,7 @@ public class ClassificationUpdateProcessorTest extends SolrTestCaseJ4 {
     updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema());
     updateProcessorToTest.processAdd(update);
 
-    assertThat(unseenDocument1.getFieldValue(PREDICTED_CLASS),is("class1"));
+    assertThat(unseenDocument1.getFieldValue(PREDICTED_CLASS),is("class2"));
   }
 
   @Test
@@ -119,7 +119,7 @@ public class ClassificationUpdateProcessorTest extends SolrTestCaseJ4 {
     updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema());
     updateProcessorToTest.processAdd(update);
 
-    assertThat(unseenDocument1.getFieldValue(TRAINING_CLASS),is("class1"));
+    assertThat(unseenDocument1.getFieldValue(TRAINING_CLASS),is("class2"));
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/util/MockCoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/util/MockCoreContainer.java b/solr/core/src/test/org/apache/solr/util/MockCoreContainer.java
index 054415a..5e8eab0 100644
--- a/solr/core/src/test/org/apache/solr/util/MockCoreContainer.java
+++ b/solr/core/src/test/org/apache/solr/util/MockCoreContainer.java
@@ -26,7 +26,7 @@ import org.apache.solr.core.CoreDescriptor;
 public class MockCoreContainer extends CoreContainer {
   public static class MockCoreDescriptor extends CoreDescriptor {
     public MockCoreDescriptor() {
-      super(new MockCoreContainer(), "mock", Paths.get("path"));
+      super("mock", Paths.get("path"), null, false);
     }
   }
   

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java b/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
index aa02de5..4eb1169 100644
--- a/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
+++ b/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
@@ -18,6 +18,7 @@
 package org.apache.solr.util.stats;
 
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
@@ -45,7 +46,11 @@ public class MetricUtilsTest extends SolrTestCaseJ4 {
       timer.update(Math.abs(random().nextInt()) + 1, TimeUnit.NANOSECONDS);
     }
     // obtain timer metrics
-    NamedList lst = new NamedList(MetricUtils.convertTimer(timer, false));
+    Map<String,Object> map = new HashMap<>();
+    MetricUtils.convertTimer("", timer, MetricUtils.PropertyFilter.ALL, false, false, (k, v) -> {
+      map.putAll((Map<String,Object>)v);
+    });
+    NamedList lst = new NamedList(map);
     // check that expected metrics were obtained
     assertEquals(14, lst.size());
     final Snapshot snapshot = timer.getSnapshot();
@@ -84,7 +89,7 @@ public class MetricUtilsTest extends SolrTestCaseJ4 {
     Gauge<Long> error = () -> {throw new InternalError("Memory Pool not found error");};
     registry.register("memory.expected.error", error);
     MetricUtils.toMaps(registry, Collections.singletonList(MetricFilter.ALL), MetricFilter.ALL,
-        false, false, false, (k, o) -> {
+        MetricUtils.PropertyFilter.ALL, false, false, false, false, (k, o) -> {
       Map v = (Map)o;
       if (k.startsWith("counter")) {
         assertEquals(1L, v.get("count"));
@@ -114,7 +119,7 @@ public class MetricUtilsTest extends SolrTestCaseJ4 {
     });
     // test compact format
     MetricUtils.toMaps(registry, Collections.singletonList(MetricFilter.ALL), MetricFilter.ALL,
-        false, false, true, (k, o) -> {
+        MetricUtils.PropertyFilter.ALL, false, false, true, false, (k, o) -> {
           if (k.startsWith("counter")) {
             assertTrue(o instanceof Long);
             assertEquals(1L, o);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
index ad85256..5c34cb6 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
@@ -16,7 +16,6 @@
  */
 package org.apache.solr.client.solrj.impl;
 
-import java.io.Closeable;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.net.ConnectException;
@@ -84,8 +83,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.MDC;
 
-import static org.apache.solr.common.params.CommonParams.ID;
 import static org.apache.solr.common.params.CommonParams.ADMIN_PATHS;
+import static org.apache.solr.common.params.CommonParams.ID;
 
 /**
  * SolrJ client class to communicate with SolrCloud.
@@ -393,7 +392,7 @@ public class CloudSolrClient extends SolrClient {
    */
   @Deprecated
   public CloudSolrClient(Collection<String> zkHosts, String chroot, HttpClient httpClient, LBHttpSolrClient lbSolrClient, boolean updatesToLeaders) {
-    this(zkHosts, chroot, httpClient, lbSolrClient, null, updatesToLeaders, false, null);
+    this(zkHosts, chroot, null, httpClient, lbSolrClient, null, updatesToLeaders, false, null);
   }
 
   /**
@@ -407,9 +406,14 @@ public class CloudSolrClient extends SolrClient {
    *          each host in the zookeeper ensemble. Note that with certain
    *          Collection types like HashSet, the order of hosts in the final
    *          connect string may not be in the same order you added them.
+   *          Provide only one of solrUrls or zkHosts.
    * @param chroot
    *          A chroot value for zookeeper, starting with a forward slash. If no
    *          chroot is required, use null.
+   * @param solrUrls
+   *          A list of Solr URLs to configure the underlying {@link HttpClusterStateProvider}, which will
+   *          use one of these URLs to fetch the list of live nodes for this Solr cluster. Provide only
+   *          one of solrUrls or zkHosts.
    * @param httpClient
    *          the {@link HttpClient} instance to be used for all requests. The provided httpClient should use a
    *          multi-threaded connection manager.  If null, a default HttpClient will be used.
@@ -424,6 +428,7 @@ public class CloudSolrClient extends SolrClient {
    */
   private CloudSolrClient(Collection<String> zkHosts,
                           String chroot,
+                          List<String> solrUrls,
                           HttpClient httpClient,
                           LBHttpSolrClient lbSolrClient,
                           LBHttpSolrClient.Builder lbHttpSolrClientBuilder,
@@ -433,7 +438,21 @@ public class CloudSolrClient extends SolrClient {
 
   ) {
     if (stateProvider == null) {
-      this.stateProvider = new ZkClientClusterStateProvider(zkHosts, chroot);
+      if (zkHosts != null && solrUrls != null) {
+        throw new IllegalArgumentException("Both zkHost(s) & solrUrl(s) have been specified. Only specify one.");
+      }
+      if (zkHosts != null) {
+        this.stateProvider = new ZkClientClusterStateProvider(zkHosts, chroot);
+      } else if (solrUrls != null && !solrUrls.isEmpty()) {
+        try {
+          this.stateProvider = new HttpClusterStateProvider(solrUrls, httpClient);
+        } catch (Exception e) {
+          throw new RuntimeException("Couldn't initialize a HttpClusterStateProvider (is/are the "
+              + "Solr server(s), "  + solrUrls + ", down?)", e);
+        }
+      } else {
+        throw new IllegalArgumentException("Both zkHosts and solrUrl cannot be null.");
+      }
     } else {
       this.stateProvider = stateProvider;
     }
@@ -1260,7 +1279,7 @@ public class CloudSolrClient extends SolrClient {
       Set<String> liveNodes = stateProvider.liveNodes();
       for (String liveNode : liveNodes) {
         theUrlList.add(ZkStateReader.getBaseUrlForNodeName(liveNode,
-            (String) stateProvider.getClusterProperties().getOrDefault(ZkStateReader.URL_SCHEME,"http")));
+            (String) stateProvider.getClusterProperty(ZkStateReader.URL_SCHEME,"http")));
       }
     } else {
       
@@ -1366,7 +1385,7 @@ public class CloudSolrClient extends SolrClient {
     return rsp.getResponse();
   }
 
-  Set<String> getCollectionNames(String collection) {
+  private Set<String> getCollectionNames(String collection) {
     // Extract each comma separated collection name and store in a List.
     List<String> rawCollectionsList = StrUtils.splitSmart(collection, ",", true);
     Set<String> collectionNames = new HashSet<>();
@@ -1603,6 +1622,7 @@ public class CloudSolrClient extends SolrClient {
    */
   public static class Builder {
     private Collection<String> zkHosts;
+    private List<String> solrUrls;
     private HttpClient httpClient;
     private String zkChroot;
     private LBHttpSolrClient loadBalancedSolrClient;
@@ -1614,6 +1634,7 @@ public class CloudSolrClient extends SolrClient {
 
     public Builder() {
       this.zkHosts = new ArrayList();
+      this.solrUrls = new ArrayList();
       this.shardLeadersOnly = true;
     }
     
@@ -1630,8 +1651,29 @@ public class CloudSolrClient extends SolrClient {
       this.zkHosts.add(zkHost);
       return this;
     }
+
+    /**
+     * Provide a Solr URL to be used when configuring {@link CloudSolrClient} instances.
+     *
+     * Method may be called multiple times. One of the provided values will be used to fetch
+     * the list of live Solr nodes that the underlying {@link HttpClusterStateProvider} maintains.
+     */
+    public Builder withSolrUrl(String solrUrl) {
+      this.solrUrls.add(solrUrl);
+      return this;
+    }
     
     /**
+     * Provide a list of Solr URLs to be used when configuring {@link CloudSolrClient} instances.
+     * One of the provided values will be used to fetch the list of live Solr
+     * nodes that the underlying {@link HttpClusterStateProvider} maintains.
+     */
+    public Builder withSolrUrl(Collection<String> solrUrls) {
+      this.solrUrls.addAll(solrUrls);
+      return this;
+    }
+
+    /**
      * Provides an {@link LBHttpSolrClient.Builder} for this builder to use when creating load-balanced clients.
      */
     public Builder withLBHttpSolrClientBuilder(LBHttpSolrClient.Builder lbHttpSolrClientBuilder) {
@@ -1723,25 +1765,22 @@ public class CloudSolrClient extends SolrClient {
      */
     public CloudSolrClient build() {
       if (stateProvider == null) {
-        stateProvider = new ZkClientClusterStateProvider(zkHosts, zkChroot);
+        if (!zkHosts.isEmpty()) {
+          stateProvider = new ZkClientClusterStateProvider(zkHosts, zkChroot);
+        }
+        else if (!this.solrUrls.isEmpty()) {
+          try {
+            stateProvider = new HttpClusterStateProvider(solrUrls, httpClient);
+          } catch (Exception e) {
+            throw new RuntimeException("Couldn't initialize a HttpClusterStateProvider (is/are the "
+                + "Solr server(s), "  + solrUrls + ", down?)", e);
+          }
+        } else {
+          throw new IllegalArgumentException("Both zkHosts and solrUrl cannot be null.");
+        }
       }
-      return new CloudSolrClient(zkHosts, zkChroot, httpClient, loadBalancedSolrClient, lbClientBuilder,
+      return new CloudSolrClient(zkHosts, zkChroot, solrUrls, httpClient, loadBalancedSolrClient, lbClientBuilder,
           shardLeadersOnly, directUpdatesToLeadersOnly, stateProvider);
     }
   }
-
-  public interface ClusterStateProvider extends Closeable {
-
-    ClusterState.CollectionRef getState(String collection);
-
-    Set<String> liveNodes();
-
-    String getAlias(String collection);
-
-    String getCollectionName(String name);
-
-    Map<String, Object> getClusterProperties();
-
-    void connect();
-  }
 }

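To make the new construction path concrete, a minimal sketch of building a client from a Solr URL instead of a ZooKeeper host (the base URL and collection name are hypothetical). Supplying both a zkHost and a solrUrl is rejected with an IllegalArgumentException.

    import org.apache.solr.client.solrj.impl.CloudSolrClient;

    // The HTTP-backed state provider discovers live_nodes from this URL.
    CloudSolrClient client = new CloudSolrClient.Builder()
        .withSolrUrl("http://localhost:8983/solr") // hypothetical base URL
        .build();
    client.setDefaultCollection("techproducts");   // hypothetical collection
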
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ClusterStateProvider.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ClusterStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ClusterStateProvider.java
new file mode 100644
index 0000000..b913cd4
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ClusterStateProvider.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.impl;
+
+import java.io.Closeable;
+import java.util.Set;
+
+import org.apache.solr.common.cloud.ClusterState;
+
+public interface ClusterStateProvider extends Closeable {
+
+  /**
+   * Obtain the state of the collection (cluster status).
+   * @return the collection state, or null is collection doesn't exist
+   */
+  ClusterState.CollectionRef getState(String collection);
+
+  /**
+   * Obtain set of live_nodes for the cluster.
+   */
+  Set<String> liveNodes();
+
+  /**
+   * Given an alias, returns the collection name that this alias points to
+   */
+  String getAlias(String alias);
+
+  /**
+   * Given a name, returns the collection name if an alias by that name exists, or
+   * returns the name itself, if no alias exists.
+   */
+  String getCollectionName(String name);
+
+  /**
+   * Obtain a cluster property, or null if it doesn't exist.
+   */
+  Object getClusterProperty(String propertyName);
+
+  /**
+   * Obtain a cluster property, or the default value if it doesn't exist.
+   */
+  Object getClusterProperty(String propertyName, String def);
+
+  void connect();
+}
\ No newline at end of file

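A short sketch of the contract from a caller's perspective (the method and argument names are illustrative; either the ZK-backed or the new HTTP-backed implementation satisfies it):

    import org.apache.solr.client.solrj.impl.ClusterStateProvider;

    static String resolve(ClusterStateProvider provider, String nameOrAlias) {
      // Aliases resolve to their target collection; plain names pass through.
      return provider.getCollectionName(nameOrAlias);
    }

Cluster properties follow the same pattern, e.g. getClusterProperty(ZkStateReader.URL_SCHEME, "http") falls back to the supplied default when the property is absent.
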
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClusterStateProvider.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClusterStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClusterStateProvider.java
new file mode 100644
index 0000000..1fb9415
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClusterStateProvider.java
@@ -0,0 +1,261 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.impl;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.http.client.HttpClient;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.QueryRequest;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.ClusterState.CollectionRef;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SimpleOrderedMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class HttpClusterStateProvider implements ClusterStateProvider {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private String urlScheme;
+  volatile Set<String> liveNodes;
+  long liveNodesTimestamp = 0;
+  volatile Map<String, String> aliases;
+  long aliasesTimestamp = 0;
+
+  private int cacheTimeout = 5; // the liveNodes and aliases cache will be invalidated after 5 secs
+  final HttpClient httpClient;
+  final boolean clientIsInternal;
+
+  public HttpClusterStateProvider(List<String> solrUrls, HttpClient httpClient) throws Exception {
+    this.httpClient = httpClient == null? HttpClientUtil.createClient(null): httpClient;
+    this.clientIsInternal = httpClient == null;
+    for (String solrUrl: solrUrls) {
+      urlScheme = solrUrl.startsWith("https")? "https": "http";
+      try (SolrClient initialClient = new HttpSolrClient.Builder().withBaseSolrUrl(solrUrl).withHttpClient(httpClient).build()) {
+        Set<String> liveNodes = fetchLiveNodes(initialClient); // throws exception if unable to fetch
+        this.liveNodes = liveNodes;
+        liveNodesTimestamp = System.nanoTime();
+        break;
+      } catch (IOException e) {
+        log.warn("Attempt to fetch live_nodes from " + solrUrl + " failed.", e);
+      }
+    }
+
+    if (this.liveNodes == null || this.liveNodes.isEmpty()) {
+      throw new RuntimeException("Tried fetching live_nodes using Solr URLs provided, i.e. " + solrUrls + ". However, "
+          + "succeeded in obtaining the cluster state from none of them."
+          + "If you think your Solr cluster is up and is accessible,"
+          + " you could try re-creating a new CloudSolrClient using working"
+          + " solrUrl(s) or zkHost(s).");
+    }
+  }
+
+  @Override
+  public void close() throws IOException {
+    if (this.clientIsInternal && this.httpClient != null) {
+      HttpClientUtil.close(httpClient);
+    }
+  }
+
+  @Override
+  public CollectionRef getState(String collection) {
+    for (String nodeName: liveNodes) {
+      try (HttpSolrClient client = new HttpSolrClient.Builder().
+          withBaseSolrUrl(ZkStateReader.getBaseUrlForNodeName(nodeName, urlScheme)).
+          withHttpClient(httpClient).build()) {
+        ClusterState cs = fetchClusterState(client, collection);
+        return cs.getCollectionRef(collection);
+      } catch (SolrServerException | RemoteSolrException | IOException e) {
+        if (e.getMessage().contains(collection + " not found")) {
+          // Cluster state for the given collection was not found.
+          // Lets fetch/update our aliases:
+          getAliases(true);
+          return null;
+        }
+        log.warn("Attempt to fetch cluster state from " +
+            ZkStateReader.getBaseUrlForNodeName(nodeName, urlScheme) + " failed.", e);
+      }
+    }
+    throw new RuntimeException("Tried fetching cluster state using the node names we knew of, i.e. " + liveNodes +". However, "
+        + "succeeded in obtaining the cluster state from none of them."
+        + "If you think your Solr cluster is up and is accessible,"
+        + " you could try re-creating a new CloudSolrClient using working"
+        + " solrUrl(s) or zkHost(s).");
+  }
+
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  private ClusterState fetchClusterState(SolrClient client, String collection) throws SolrServerException, IOException {
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    params.set("collection", collection);
+    params.set("action", "CLUSTERSTATUS");
+    QueryRequest request = new QueryRequest(params);
+    request.setPath("/admin/collections");
+    NamedList cluster = (SimpleOrderedMap) client.request(request).get("cluster");
+    Map<String, Object> collectionsMap = Collections.singletonMap(collection,
+        ((NamedList) cluster.get("collections")).get(collection));
+    int znodeVersion = (int)((Map<String, Object>)(collectionsMap).get(collection)).get("znodeVersion");
+    Set<String> liveNodes = new HashSet((List<String>)(cluster.get("live_nodes")));
+    this.liveNodes = liveNodes;
+    liveNodesTimestamp = System.nanoTime();
+    ClusterState cs = ClusterState.load(znodeVersion, collectionsMap, liveNodes, ZkStateReader.CLUSTER_STATE);
+    return cs;
+  }
+
+  @Override
+  public Set<String> liveNodes() {
+    if (liveNodes == null) {
+      throw new RuntimeException("We don't know of any live_nodes to fetch the"
+          + " latest live_nodes information from. "
+          + "If you think your Solr cluster is up and is accessible,"
+          + " you could try re-creating a new CloudSolrClient using working"
+          + " solrUrl(s) or zkHost(s).");
+    }
+    if (TimeUnit.SECONDS.convert((System.nanoTime() - liveNodesTimestamp), TimeUnit.NANOSECONDS) > getCacheTimeout()) {
+      for (String nodeName: liveNodes) {
+        try (HttpSolrClient client = new HttpSolrClient.Builder().
+            withBaseSolrUrl(ZkStateReader.getBaseUrlForNodeName(nodeName, urlScheme)).
+            withHttpClient(httpClient).build()) {
+          Set<String> liveNodes = fetchLiveNodes(client);
+          this.liveNodes = liveNodes;
+          liveNodesTimestamp = System.nanoTime();
+          return liveNodes;
+        } catch (Exception e) {
+          log.warn("Attempt to fetch live_nodes from " +
+              ZkStateReader.getBaseUrlForNodeName(nodeName, urlScheme) + " failed.", e);
+        }
+      }
+      throw new RuntimeException("Tried fetching live_nodes using all the node names we knew of, i.e. " + liveNodes +". However, "
+          + "succeeded in obtaining the cluster state from none of them."
+          + "If you think your Solr cluster is up and is accessible,"
+          + " you could try re-creating a new CloudSolrClient using working"
+          + " solrUrl(s) or zkHost(s).");
+    } else {
+      return liveNodes; // cached copy is fresh enough
+    }
+  }
+
+  private static Set<String> fetchLiveNodes(SolrClient client) throws Exception {
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    params.set("action", "CLUSTERSTATUS");
+    QueryRequest request = new QueryRequest(params);
+    request.setPath("/admin/collections");
+    NamedList cluster = (SimpleOrderedMap) client.request(request).get("cluster");
+    Set<String> liveNodes = new HashSet((List<String>)(cluster.get("live_nodes")));
+    return liveNodes;
+  }
+
+  @Override
+  public String getAlias(String alias) {
+    Map<String, String> aliases = getAliases(false);
+    return aliases.get(alias);
+  }
+
+  private Map<String, String> getAliases(boolean forceFetch) {
+    if (this.liveNodes == null) {
+      throw new RuntimeException("We don't know of any live_nodes to fetch the"
+          + " latest aliases information from. "
+          + "If you think your Solr cluster is up and is accessible,"
+          + " you could try re-creating a new CloudSolrClient using working"
+          + " solrUrl(s) or zkHost(s).");
+    }
+
+    if (forceFetch || this.aliases == null ||
+        TimeUnit.SECONDS.convert((System.nanoTime() - aliasesTimestamp), TimeUnit.NANOSECONDS) > getCacheTimeout()) {
+      for (String nodeName: liveNodes) {
+        try (HttpSolrClient client = new HttpSolrClient.Builder().
+            withBaseSolrUrl(ZkStateReader.getBaseUrlForNodeName(nodeName, urlScheme)).
+            withHttpClient(httpClient).build()) {
+
+          Map<String, String> aliases = new CollectionAdminRequest.ListAliases().process(client).getAliases();
+          this.aliases = aliases;
+          this.aliasesTimestamp = System.nanoTime();
+          return Collections.unmodifiableMap(aliases);
+        } catch (SolrServerException | RemoteSolrException | IOException e) {
+          // Situation where we're hitting an older Solr which doesn't have LISTALIASES
+          if (e instanceof RemoteSolrException && ((RemoteSolrException)e).code()==400) {
+            log.warn("LISTALIASES not found, possibly using older Solr server. Aliases won't work"
+                + " unless you re-create the CloudSolrClient using zkHost(s) or upgrade Solr server", e);
+            this.aliases = Collections.emptyMap();
+            this.aliasesTimestamp = System.nanoTime();
+            return aliases;
+          }
+          log.warn("Attempt to fetch cluster state from " +
+              ZkStateReader.getBaseUrlForNodeName(nodeName, urlScheme) + " failed.", e);
+        }
+      }
+
+      throw new RuntimeException("Tried fetching aliases using all the node names we knew of, i.e. " + liveNodes +". However, "
+          + "succeeded in obtaining the cluster state from none of them."
+          + "If you think your Solr cluster is up and is accessible,"
+          + " you could try re-creating a new CloudSolrClient using a working"
+          + " solrUrl or zkHost.");
+    } else {
+      return Collections.unmodifiableMap(this.aliases); // cached copy is fresh enough
+    }
+  }
+
+  @Override
+  public String getCollectionName(String name) {
+    Map<String, String> aliases = getAliases(false);
+    return aliases.containsKey(name) ? aliases.get(name): name;
+  }
+
+  @Override
+  public Object getClusterProperty(String propertyName) {
+    if (propertyName.equals(ZkStateReader.URL_SCHEME)) {
+      return this.urlScheme;
+    }
+    throw new UnsupportedOperationException("Fetching cluster properties not supported"
+        + " using the HttpClusterStateProvider. "
+        + "ZkClientClusterStateProvider can be used for this."); // TODO
+  }
+
+  @Override
+  public Object getClusterProperty(String propertyName, String def) {
+    if (propertyName.equals(ZkStateReader.URL_SCHEME)) {
+      return this.urlScheme;
+    }
+    throw new UnsupportedOperationException("Fetching cluster properties not supported"
+        + " using the HttpClusterStateProvider. "
+        + "ZkClientClusterStateProvider can be used for this."); // TODO
+  }
+
+  @Override
+  public void connect() {}
+
+  public int getCacheTimeout() {
+    return cacheTimeout;
+  }
+
+  public void setCacheTimeout(int cacheTimeout) {
+    this.cacheTimeout = cacheTimeout;
+  }
+
+}

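The provider caches live_nodes and aliases, refetching from a live node once the cached copy is older than cacheTimeout seconds (5 by default). A hedged usage sketch, assuming a reachable server at a hypothetical URL:

    import java.util.Collections;
    import org.apache.solr.client.solrj.impl.HttpClusterStateProvider;

    // Passing null for HttpClient makes the provider create (and later close) its own;
    // the constructor throws if none of the URLs yields live_nodes.
    HttpClusterStateProvider provider = new HttpClusterStateProvider(
        Collections.singletonList("http://localhost:8983/solr"), null);
    provider.setCacheTimeout(10); // tolerate 10s-stale live_nodes/aliases before refetching
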
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java
index 3cb59cd..ed6ae7b 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java
@@ -360,10 +360,11 @@ public class LBHttpSolrClient extends SolrClient {
     boolean isNonRetryable = req.request instanceof IsUpdateRequest || ADMIN_PATHS.contains(req.request.getPath());
     List<ServerWrapper> skipped = null;
 
+    boolean timeAllowedExceeded = false;
     long timeAllowedNano = getTimeAllowedInNanos(req.getRequest());
     long timeOutTime = System.nanoTime() + timeAllowedNano;
     for (String serverStr : req.getServers()) {
-      if(isTimeExceeded(timeAllowedNano, timeOutTime)) {
+      if (timeAllowedExceeded = isTimeExceeded(timeAllowedNano, timeOutTime)) {
         break;
       }
       
@@ -400,22 +401,33 @@ public class LBHttpSolrClient extends SolrClient {
     // try the servers we previously skipped
     if (skipped != null) {
       for (ServerWrapper wrapper : skipped) {
-        if(isTimeExceeded(timeAllowedNano, timeOutTime)) {
+        if (timeAllowedExceeded = isTimeExceeded(timeAllowedNano, timeOutTime)) {
           break;
         }
 
-        ex = doRequest(wrapper.client, req, rsp, isNonRetryable, true, wrapper.getKey());
-        if (ex == null) {
-          return rsp; // SUCCESS
+        try {
+          MDC.put("LBHttpSolrClient.url", wrapper.client.getBaseURL());
+          ex = doRequest(wrapper.client, req, rsp, isNonRetryable, true, wrapper.getKey());
+          if (ex == null) {
+            return rsp; // SUCCESS
+          }
+        } finally {
+          MDC.remove("LBHttpSolrClient.url");
         }
       }
     }
 
 
+    final String solrServerExceptionMessage;
+    if (timeAllowedExceeded) {
+      solrServerExceptionMessage = "Time allowed to handle this request exceeded";
+    } else {
+      solrServerExceptionMessage = "No live SolrServers available to handle this request";
+    }
     if (ex == null) {
-      throw new SolrServerException("No live SolrServers available to handle this request");
+      throw new SolrServerException(solrServerExceptionMessage);
     } else {
-      throw new SolrServerException("No live SolrServers available to handle this request:" + zombieServers.keySet(), ex);
+      throw new SolrServerException(solrServerExceptionMessage + ":" + zombieServers.keySet(), ex);
     }
 
   }
@@ -588,10 +600,11 @@ public class LBHttpSolrClient extends SolrClient {
     int maxTries = serverList.length;
     Map<String,ServerWrapper> justFailed = null;
 
+    boolean timeAllowedExceeded = false;
     long timeAllowedNano = getTimeAllowedInNanos(request);
     long timeOutTime = System.nanoTime() + timeAllowedNano;
     for (int attempts=0; attempts<maxTries; attempts++) {
-      if(isTimeExceeded(timeAllowedNano, timeOutTime)) {
+      if (timeAllowedExceeded = isTimeExceeded(timeAllowedNano, timeOutTime)) {
         break;
       }
       
@@ -619,7 +632,7 @@ public class LBHttpSolrClient extends SolrClient {
 
     // try other standard servers that we didn't try just now
     for (ServerWrapper wrapper : zombieServers.values()) {
-      if(isTimeExceeded(timeAllowedNano, timeOutTime)) {
+      if (timeAllowedExceeded = isTimeExceeded(timeAllowedNano, timeOutTime)) {
         break;
       }
       
@@ -646,10 +659,16 @@ public class LBHttpSolrClient extends SolrClient {
     }
 
 
+    final String solrServerExceptionMessage;
+    if (timeAllowedExceeded) {
+      solrServerExceptionMessage = "Time allowed to handle this request exceeded";
+    } else {
+      solrServerExceptionMessage = "No live SolrServers available to handle this request";
+    }
     if (ex == null) {
-      throw new SolrServerException("No live SolrServers available to handle this request");
+      throw new SolrServerException(solrServerExceptionMessage);
     } else {
-      throw new SolrServerException("No live SolrServers available to handle this request", ex);
+      throw new SolrServerException(solrServerExceptionMessage, ex);
     }
   }
   

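A minimal sketch of what the timeAllowed handling above distinguishes,
assuming an already-built LBHttpSolrClient named "lbClient"; the 500ms budget
is illustrative:

    // Needs: java.io.IOException,
    //        org.apache.solr.client.solrj.SolrQuery,
    //        org.apache.solr.client.solrj.SolrServerException,
    //        org.apache.solr.common.params.CommonParams
    SolrQuery query = new SolrQuery("*:*");
    query.setParam(CommonParams.TIME_ALLOWED, "500"); // per-request budget, ms

    try {
      lbClient.query(query);
    } catch (SolrServerException | IOException e) {
      // With this change the exception message distinguishes the two modes:
      // "Time allowed to handle this request exceeded" vs.
      // "No live SolrServers available to handle this request".
    }
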
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ZkClientClusterStateProvider.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ZkClientClusterStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ZkClientClusterStateProvider.java
index 1e50127..1875c50 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ZkClientClusterStateProvider.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ZkClientClusterStateProvider.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 
-public class ZkClientClusterStateProvider implements CloudSolrClient.ClusterStateProvider {
+public class ZkClientClusterStateProvider implements ClusterStateProvider {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
 
@@ -72,14 +72,24 @@ public class ZkClientClusterStateProvider implements CloudSolrClient.ClusterStat
 
 
   @Override
-  public String getAlias(String collection) {
+  public String getAlias(String alias) {
     Aliases aliases = zkStateReader.getAliases();
-    return aliases.getCollectionAlias(collection);
+    return aliases.getCollectionAlias(alias);
   }
 
   @Override
-  public Map<String, Object> getClusterProperties() {
-    return zkStateReader.getClusterProperties();
+  public Object getClusterProperty(String propertyName) {
+    Map<String, Object> props = zkStateReader.getClusterProperties();
+    return props.get(propertyName);
+  }
+
+  @Override
+  public Object getClusterProperty(String propertyName, String def) {
+    Map<String, Object> props = zkStateReader.getClusterProperties();
+    if (props.containsKey(propertyName)) {
+      return props.get(propertyName);
+    }
+    return def;
   }
 
   @Override

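A short sketch of the two accessors introduced above, assuming an
already-built ZkClientClusterStateProvider named "zkProvider"; the
"legacyCloud" property name and "true" default are illustrative:

    // Returns null when the property is unset in ZooKeeper...
    Object scheme = zkProvider.getClusterProperty(ZkStateReader.URL_SCHEME);

    // ...or falls back to the supplied default.
    Object legacy = zkProvider.getClusterProperty("legacyCloud", "true");
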
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ConversionEvaluator.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ConversionEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ConversionEvaluator.java
new file mode 100644
index 0000000..2849b49
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/ConversionEvaluator.java
@@ -0,0 +1,166 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.util.Locale;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class ConversionEvaluator extends ComplexEvaluator {
+
+  enum LENGTH_CONSTANT {MILES, YARDS, FEET, INCHES, MILLIMETERS, CENTIMETERS, METERS, KILOMETERS};
+
+  private LENGTH_CONSTANT from;
+  private LENGTH_CONSTANT to;
+  private Convert convert;
+
+  public ConversionEvaluator(StreamExpression expression, StreamFactory factory) throws IOException {
+    super(expression, factory);
+
+    if (3 != subEvaluators.size()) {
+      throw new EvaluatorException(new IOException(String.format(Locale.ROOT, "Invalid expression %s - expecting 3 values but found %d", expression, subEvaluators.size())));
+    }
+
+    try {
+      from = LENGTH_CONSTANT.valueOf(subEvaluators.get(0).toExpression(factory).toString().toUpperCase(Locale.ROOT));
+      to = LENGTH_CONSTANT.valueOf(subEvaluators.get(1).toExpression(factory).toString().toUpperCase(Locale.ROOT));
+      this.convert = getConvert(from, to);
+    } catch (IllegalArgumentException e) {
+      throw new EvaluatorException(e);
+    }
+  }
+
+  private String listParams() {
+    StringBuilder buf = new StringBuilder();
+    for (LENGTH_CONSTANT lc : LENGTH_CONSTANT.values()) {
+      if (buf.length() > 0) {
+        buf.append(", ");
+      }
+      buf.append(lc.toString());
+    }
+    return buf.toString();
+  }
+
+  @Override
+  public Object evaluate(Tuple tuple) throws IOException {
+
+    StreamEvaluator streamEvaluator = subEvaluators.get(2);
+    Object tupleValue = streamEvaluator.evaluate(tuple);
+
+    if (tupleValue == null) return null;
+
+    Number number = (Number)tupleValue;
+    double d = number.doubleValue();
+    return convert.convert(d);
+  }
+
+  private Convert getConvert(LENGTH_CONSTANT from, LENGTH_CONSTANT to) throws IOException {
+    switch(from) {
+      case INCHES:
+        switch(to) {
+          case MILLIMETERS:
+            return (double d) -> d*25.4;
+          case CENTIMETERS:
+            return (double d) -> d*2.54;
+          case METERS:
+            return (double d) -> d*0.0254;
+          default:
+            throw new EvaluatorException("No conversion available from "+from+" to "+to);
+        }
+      case FEET:
+        switch(to) {
+          case METERS:
+            return (double d) -> d * .30;
+          default:
+            throw new EvaluatorException("No conversion available from "+from+" to "+to);
+        }
+      case YARDS:
+        switch(to) {
+          case METERS:
+            return (double d) -> d * .91;
+          case KILOMETERS:
+            return (double d) -> d * 0.00091;
+          default:
+            throw new EvaluatorException("No conversion available from "+from+" to "+to);
+        }
+      case MILES:
+        switch(to) {
+          case KILOMETERS:
+            return (double d) -> d * 1.61;
+          default:
+            throw new EvaluatorException("No conversion available from "+from+" to "+to);
+        }
+      case MILLIMETERS:
+        switch (to) {
+          case INCHES:
+            return (double d) -> d * 0.039;
+          default:
+            throw new EvaluatorException("No conversion available from "+from+" to "+to);
+        }
+      case CENTIMETERS:
+        switch(to) {
+          case INCHES:
+            return (double d) -> d * 0.39;
+          default:
+            throw new EvaluatorException("No conversion available from "+from+" to "+to);
+        }
+      case METERS:
+        switch(to) {
+          case FEET:
+            return (double d) -> d * 3.28;
+          default:
+            throw new EvaluatorException("No conversion available from "+from+" to "+to);
+        }
+      case KILOMETERS:
+        switch(to) {
+          case MILES:
+            return (double d) -> d * 0.62;
+          case FEET:
+            return (double d) -> d * 3280.8;
+          default:
+            throw new EvaluatorException("No conversion available from "+from+" to "+to);
+        }
+      default:
+        throw new EvaluatorException("No conversion available from "+from);
+    }
+  }
+
+  private interface Convert {
+    public double convert(double d);
+  }
+
+  @Override
+  public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException {
+    StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
+
+    for (StreamEvaluator evaluator : subEvaluators) {
+      expression.addParameter(evaluator.toExpression(factory));
+    }
+
+    return expression;
+  }
+
+  @Override
+  public Explanation toExplanation(StreamFactory factory) throws IOException {
+    return new Explanation(nodeId.toString())
+        .withExpressionType(Explanation.ExpressionType.EVALUATOR)
+        .withImplementingClass(getClass().getName())
+        .withExpression(toExpression(factory).toString());
+  }
+}
\ No newline at end of file

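The evaluator above dispatches a pair of units to a double-to-double lambda
through nested switches, with each inner switch throwing on unsupported
targets so control never falls through to the next unit's cases. A
standalone, runnable sketch of the same technique (plain JDK, not Solr API;
the units and coarse factors mirror two of the cases above):

    import java.util.function.DoubleUnaryOperator;

    class UnitConvDemo {
      enum Unit { MILES, KILOMETERS }

      // Pick a converter for a (from, to) pair, or throw if unsupported.
      static DoubleUnaryOperator converter(Unit from, Unit to) {
        switch (from) {
          case MILES:
            switch (to) {
              case KILOMETERS: return d -> d * 1.61;
              default: throw new IllegalArgumentException(from + " -> " + to);
            }
          case KILOMETERS:
            switch (to) {
              case MILES: return d -> d * 0.62;
              default: throw new IllegalArgumentException(from + " -> " + to);
            }
          default:
            throw new IllegalArgumentException(String.valueOf(from));
        }
      }

      public static void main(String[] args) {
        // 10 miles -> ~16.1 kilometers with the coarse factor above
        System.out.println(converter(Unit.MILES, Unit.KILOMETERS).applyAsDouble(10));
      }
    }
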
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EvaluatorException.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EvaluatorException.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EvaluatorException.java
new file mode 100644
index 0000000..d2098c2
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EvaluatorException.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+
+public class EvaluatorException extends IOException {
+  public EvaluatorException(Throwable t) {
+    super(t);
+  }
+
+  public EvaluatorException(String message) {
+    super(message);
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluator.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluator.java
new file mode 100644
index 0000000..4af1f354
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluator.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+import java.time.format.DateTimeParseException;
+import java.time.temporal.TemporalAccessor;
+import java.time.temporal.UnsupportedTemporalTypeException;
+import java.util.Date;
+import java.util.Locale;
+import java.util.Map;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+/**
+ * A generic date evaluator for use with a TemporalAccessor
+ */
+public abstract class TemporalEvaluator extends ComplexEvaluator {
+
+  private String field;
+
+  public TemporalEvaluator(StreamExpression expression, StreamFactory factory) throws IOException {
+    super(expression, factory);
+
+    if (1 != subEvaluators.size()) {
+      throw new IOException(String.format(Locale.ROOT, "Invalid expression %s - expecting one value but found %d", expression, subEvaluators.size()));
+    }
+  }
+
+  @Override
+  public Object evaluate(Tuple tuple) throws IOException {
+
+    Instant instant = null;
+    TemporalAccessor date = null;
+
+    //First evaluate the parameter
+    StreamEvaluator streamEvaluator = subEvaluators.get(0);
+    Object tupleValue = streamEvaluator.evaluate(tuple);
+
+    if (tupleValue == null) return null;
+
+    if(field == null) {
+      field = streamEvaluator.toExpression(constructingFactory).toString();
+    }
+
+    Map tupleContext = streamContext.getTupleContext();
+    date = (TemporalAccessor)tupleContext.get(field); // Check to see if the date has already been created for this field
+
+    if(date == null) {
+      if (tupleValue instanceof String) {
+        instant = getInstant((String) tupleValue);
+      } else if (tupleValue instanceof Long) {
+        instant = Instant.ofEpochMilli((Long) tupleValue);
+      } else if (tupleValue instanceof Instant) {
+        instant = (Instant) tupleValue;
+      } else if (tupleValue instanceof Date) {
+        instant = ((Date) tupleValue).toInstant();
+      } else if (tupleValue instanceof TemporalAccessor) {
+        date = ((TemporalAccessor) tupleValue);
+        tupleContext.put(field, date); // Cache the date in the TupleContext
+      }
+    }
+
+    if (instant != null) {
+      if (TemporalEvaluatorEpoch.FUNCTION_NAME.equals(getFunction())) return instant.toEpochMilli();
+      date = LocalDateTime.ofInstant(instant, ZoneOffset.UTC);
+      tupleContext.put(field, date); // Cache the date in the TupleContext
+    }
+
+    if (date != null) {
+      try {
+        return evaluateDate(date);
+      } catch (UnsupportedTemporalTypeException utte) {
+        throw new IOException(String.format(Locale.ROOT, "It is not possible to call '%s' function on %s", getFunction(), date.getClass().getName()));
+      }
+    }
+
+    throw new IOException(String.format(Locale.ROOT, "Invalid parameter %s - The parameter must be a string formatted ISO_INSTANT or of type Long, Instant, Date, LocalDateTime or TemporalAccessor.", String.valueOf(tupleValue)));
+  }
+
+  public abstract Object evaluateDate(TemporalAccessor aDate) throws IOException;
+  public abstract String getFunction();
+
+  protected Instant getInstant(String dateStr) throws IOException {
+
+    if (dateStr != null && !dateStr.isEmpty()) {
+      try {
+        return Instant.parse(dateStr);
+      } catch (DateTimeParseException e) {
+        throw new IOException(String.format(Locale.ROOT, "Invalid parameter %s - The String must be formatted in the ISO_INSTANT date format.", dateStr));
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException {
+    StreamExpression expression = new StreamExpression(getFunction());
+
+    for (StreamEvaluator evaluator : subEvaluators) {
+      expression.addParameter(evaluator.toExpression(factory));
+    }
+
+    return expression;
+  }
+
+  @Override
+  public Explanation toExplanation(StreamFactory factory) throws IOException {
+    return new Explanation(nodeId.toString())
+        .withExpressionType(Explanation.ExpressionType.EVALUATOR)
+        .withImplementingClass(getClass().getName())
+        .withExpression(toExpression(factory).toString());
+  }
+}

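Concrete subclasses of this evaluator only supply a function name and a field
extraction from the TemporalAccessor. A sketch of what one looks like (a
"millisecond" function is hypothetical and not part of this patch):

    package org.apache.solr.client.solrj.io.eval;

    import java.io.IOException;
    import java.time.temporal.ChronoField;
    import java.time.temporal.TemporalAccessor;

    import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
    import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

    public class TemporalEvaluatorMillisecond extends TemporalEvaluator {

      public static final String FUNCTION_NAME = "millisecond";

      public TemporalEvaluatorMillisecond(StreamExpression expression, StreamFactory factory) throws IOException {
        super(expression, factory);
      }

      @Override
      public String getFunction() {
        return FUNCTION_NAME;
      }

      @Override
      public Object evaluateDate(TemporalAccessor aDate) {
        return aDate.get(ChronoField.MILLI_OF_SECOND);
      }
    }
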
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorDay.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorDay.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorDay.java
new file mode 100644
index 0000000..88b5043
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorDay.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.time.temporal.ChronoField;
+import java.time.temporal.TemporalAccessor;
+
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+/**
+ * Provides a day stream evaluator
+ */
+public class TemporalEvaluatorDay extends TemporalEvaluator {
+
+  public static final String FUNCTION_NAME = "day";
+
+  public TemporalEvaluatorDay(StreamExpression expression, StreamFactory factory) throws IOException {
+    super(expression, factory);
+  }
+
+  @Override
+  public String getFunction() {
+    return FUNCTION_NAME;
+  }
+
+  @Override
+  public Object evaluateDate(TemporalAccessor aDate) {
+    return aDate.get(ChronoField.DAY_OF_MONTH);
+  }
+
+}

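These evaluators become usable once registered with a StreamFactory. A hedged
sketch, since registration happens outside this patch and the select(...)
expression in the comment is illustrative:

    // Register a few of the temporal evaluators so that expressions such as
    //   select(search(...), day(dateField) as day_i)
    // can resolve them by name.
    StreamFactory factory = new StreamFactory()
        .withFunctionName(TemporalEvaluatorDay.FUNCTION_NAME, TemporalEvaluatorDay.class)
        .withFunctionName(TemporalEvaluatorMonth.FUNCTION_NAME, TemporalEvaluatorMonth.class)
        .withFunctionName(TemporalEvaluatorYear.FUNCTION_NAME, TemporalEvaluatorYear.class);
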
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorDayOfQuarter.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorDayOfQuarter.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorDayOfQuarter.java
new file mode 100644
index 0000000..f034377
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorDayOfQuarter.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.time.temporal.IsoFields;
+import java.time.temporal.TemporalAccessor;
+
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+/**
+ * Provides a dayOfQuarter stream evaluator
+ */
+public class TemporalEvaluatorDayOfQuarter extends TemporalEvaluator {
+
+  public static final String FUNCTION_NAME = "dayOfQuarter";
+
+  public TemporalEvaluatorDayOfQuarter(StreamExpression expression, StreamFactory factory) throws IOException {
+    super(expression, factory);
+  }
+
+  @Override
+  public String getFunction() {
+    return FUNCTION_NAME;
+  }
+
+  @Override
+  public Object evaluateDate(TemporalAccessor aDate) {
+    return aDate.get(IsoFields.DAY_OF_QUARTER);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorDayOfYear.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorDayOfYear.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorDayOfYear.java
new file mode 100644
index 0000000..339f938
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorDayOfYear.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.time.temporal.ChronoField;
+import java.time.temporal.TemporalAccessor;
+
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+/**
+ * Provides a dayOfYear stream evaluator
+ */
+public class TemporalEvaluatorDayOfYear extends TemporalEvaluator {
+
+  public static final String FUNCTION_NAME = "dayOfYear";
+
+  public TemporalEvaluatorDayOfYear(StreamExpression expression, StreamFactory factory) throws IOException {
+    super(expression, factory);
+  }
+
+  @Override
+  public String getFunction() {
+    return FUNCTION_NAME;
+  }
+
+  @Override
+  public Object evaluateDate(TemporalAccessor aDate) {
+    return aDate.get(ChronoField.DAY_OF_YEAR);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorEpoch.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorEpoch.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorEpoch.java
new file mode 100644
index 0000000..a8554b3
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorEpoch.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+import java.time.temporal.TemporalAccessor;
+import java.util.Locale;
+
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+/**
+ * Provides an epoch stream evaluator
+ */
+public class TemporalEvaluatorEpoch extends TemporalEvaluator {
+
+  public static final String FUNCTION_NAME = "epoch";
+
+  public TemporalEvaluatorEpoch(StreamExpression expression, StreamFactory factory) throws IOException {
+    super(expression, factory);
+  }
+
+  @Override
+  public String getFunction() {
+    return FUNCTION_NAME;
+  }
+
+  @Override
+  public Object evaluateDate(TemporalAccessor aDate) throws IOException {
+    if (aDate instanceof LocalDateTime) {
+      return ((LocalDateTime)aDate).atZone(ZoneOffset.UTC).toInstant().toEpochMilli();
+    }
+    throw new IOException(String.format(Locale.ROOT, "Unsupported function '%s' called on %s", FUNCTION_NAME, aDate.toString()));
+  }
+
+}

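For reference, the UTC pinning in evaluateDate above in plain JDK terms (the
date value is illustrative):

    LocalDateTime dt = LocalDateTime.of(2017, 4, 20, 12, 0);
    long millis = dt.atZone(ZoneOffset.UTC).toInstant().toEpochMilli(); // 1492689600000
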
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorHour.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorHour.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorHour.java
new file mode 100644
index 0000000..5480870
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorHour.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.time.temporal.ChronoField;
+import java.time.temporal.TemporalAccessor;
+
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+/**
+ * Provides an hour stream evaluator
+ */
+public class TemporalEvaluatorHour extends TemporalEvaluator {
+
+  public static final String FUNCTION_NAME = "hour";
+
+  public TemporalEvaluatorHour(StreamExpression expression, StreamFactory factory) throws IOException {
+    super(expression, factory);
+  }
+
+  @Override
+  public String getFunction() {
+    return FUNCTION_NAME;
+  }
+
+  @Override
+  public Object evaluateDate(TemporalAccessor aDate) {
+    return aDate.get(ChronoField.HOUR_OF_DAY);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorMinute.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorMinute.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorMinute.java
new file mode 100644
index 0000000..9438e9d
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorMinute.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.time.temporal.ChronoField;
+import java.time.temporal.TemporalAccessor;
+
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+/**
+ * Provides a minute stream evaluator
+ */
+public class TemporalEvaluatorMinute extends TemporalEvaluator {
+
+  public static final String FUNCTION_NAME = "minute";
+
+  public TemporalEvaluatorMinute(StreamExpression expression, StreamFactory factory) throws IOException {
+    super(expression, factory);
+  }
+
+  @Override
+  public String getFunction() {
+    return FUNCTION_NAME;
+  }
+
+  @Override
+  public Object evaluateDate(TemporalAccessor aDate) {
+    return aDate.get(ChronoField.MINUTE_OF_HOUR);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorMonth.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorMonth.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorMonth.java
new file mode 100644
index 0000000..b9a70fe
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorMonth.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.time.temporal.ChronoField;
+import java.time.temporal.TemporalAccessor;
+
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+/**
+ * Provides a month stream evaluator
+ */
+public class TemporalEvaluatorMonth extends TemporalEvaluator {
+
+  public static final String FUNCTION_NAME = "month";
+
+  public TemporalEvaluatorMonth(StreamExpression expression, StreamFactory factory) throws IOException {
+    super(expression, factory);
+  }
+
+  @Override
+  public String getFunction() {
+    return FUNCTION_NAME;
+  }
+
+  @Override
+  public Object evaluateDate(TemporalAccessor aDate) {
+    return aDate.get(ChronoField.MONTH_OF_YEAR);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorQuarter.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorQuarter.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorQuarter.java
new file mode 100644
index 0000000..0144311
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorQuarter.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.time.temporal.IsoFields;
+import java.time.temporal.TemporalAccessor;
+
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+/**
+ * Provides a quarter stream evaluator
+ */
+public class TemporalEvaluatorQuarter extends TemporalEvaluator {
+
+  public static final String FUNCTION_NAME = "quarter";
+
+  public TemporalEvaluatorQuarter(StreamExpression expression, StreamFactory factory) throws IOException {
+    super(expression, factory);
+  }
+
+  @Override
+  public String getFunction() {
+    return FUNCTION_NAME;
+  }
+
+  @Override
+  public Object evaluateDate(TemporalAccessor aDate) {
+    return aDate.get(IsoFields.QUARTER_OF_YEAR);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorSecond.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorSecond.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorSecond.java
new file mode 100644
index 0000000..f5b71fc
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorSecond.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.time.temporal.ChronoField;
+import java.time.temporal.TemporalAccessor;
+
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+/**
+ * Provides a second stream evaluator
+ */
+public class TemporalEvaluatorSecond extends TemporalEvaluator {
+
+  public static final String FUNCTION_NAME = "second";
+
+  public TemporalEvaluatorSecond(StreamExpression expression, StreamFactory factory) throws IOException {
+    super(expression, factory);
+  }
+
+  @Override
+  public String getFunction() {
+    return FUNCTION_NAME;
+  }
+
+  @Override
+  public Object evaluateDate(TemporalAccessor aDate) {
+    return aDate.get(ChronoField.SECOND_OF_MINUTE);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorWeek.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorWeek.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorWeek.java
new file mode 100644
index 0000000..1a2974a
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorWeek.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.time.temporal.IsoFields;
+import java.time.temporal.TemporalAccessor;
+
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+/**
+ * Provides a week stream evaluator
+ */
+public class TemporalEvaluatorWeek extends TemporalEvaluator {
+
+  public static final String FUNCTION_NAME = "week";
+
+  public TemporalEvaluatorWeek(StreamExpression expression, StreamFactory factory) throws IOException {
+    super(expression, factory);
+  }
+
+  @Override
+  public String getFunction() {
+    return FUNCTION_NAME;
+  }
+
+  @Override
+  public Object evaluateDate(TemporalAccessor aDate) {
+    return aDate.get(IsoFields.WEEK_OF_WEEK_BASED_YEAR);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorYear.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorYear.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorYear.java
new file mode 100644
index 0000000..0b8d69c
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TemporalEvaluatorYear.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.time.temporal.ChronoField;
+import java.time.temporal.TemporalAccessor;
+
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+/**
+ * Provides a year stream evaluator
+ */
+public class TemporalEvaluatorYear extends TemporalEvaluator {
+
+  public static final String FUNCTION_NAME = "year";
+
+  public TemporalEvaluatorYear(StreamExpression expression, StreamFactory factory) throws IOException {
+    super(expression, factory);
+  }
+
+  @Override
+  public String getFunction() {
+    return FUNCTION_NAME;
+  }
+
+  @Override
+  public Object evaluateDate(TemporalAccessor aDate) {
+    return aDate.get(ChronoField.YEAR);
+  }
+
+}


[23/23] lucene-solr:feature/autoscaling: Squash-merge from master.

Posted by ab...@apache.org.
Squash-merge from master.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d8df9f8c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d8df9f8c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d8df9f8c

Branch: refs/heads/feature/autoscaling
Commit: d8df9f8c9963c2fc1718fd471316bf5d964125ba
Parents: 81e0f80
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Thu Apr 20 10:49:30 2017 +0200
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Thu Apr 20 12:19:14 2017 +0200

----------------------------------------------------------------------
 .../dot.settings/org.eclipse.jdt.core.prefs     |   10 +
 .../.idea/libraries/Solr_DIH_core_library.xml   |   10 +
 .../dataimporthandler/dataimporthandler.iml     |    3 +-
 lucene/CHANGES.txt                              |   25 +
 lucene/MIGRATE.txt                              |   13 +
 .../analysis/commongrams/CommonGramsFilter.java |    2 +-
 .../standard/UAX29URLEmailAnalyzer.java         |   11 +-
 .../standard/UAX29URLEmailTokenizer.java        |   23 +-
 .../standard/TestUAX29URLEmailAnalyzer.java     |   23 +
 .../standard/TestUAX29URLEmailTokenizer.java    |    2 +-
 lucene/analysis/morfologik/ivy.xml              |    1 +
 .../uk/UkrainianMorfologikAnalyzer.java         |   10 +-
 .../apache/lucene/analysis/uk/ukrainian.dict    |  Bin 1989243 -> 0 bytes
 .../apache/lucene/analysis/uk/ukrainian.info    |   10 -
 .../analysis/uk/TestUkrainianAnalyzer.java      |   10 +-
 .../BooleanPerceptronClassifier.java            |   26 +-
 .../KNearestNeighborClassifier.java             |   14 +-
 .../KNearestNeighborDocumentClassifier.java     |   14 +-
 .../BooleanPerceptronClassifierTest.java        |   51 +-
 .../classification/ClassificationTestBase.java  |    5 +-
 .../SimpleNaiveBayesClassifierTest.java         |   11 +-
 .../KNearestNeighborDocumentClassifierTest.java |   10 +-
 .../codecs/simpletext/SimpleTextBKDWriter.java  |    2 +-
 .../analysis/standard/StandardAnalyzer.java     |   11 +-
 .../analysis/standard/StandardTokenizer.java    |    6 +-
 .../lucene/index/BufferedUpdatesStream.java     |    2 +-
 .../org/apache/lucene/index/MultiDocValues.java |    2 +-
 .../apache/lucene/index/SegmentDocValues.java   |    2 +-
 .../org/apache/lucene/index/SegmentReader.java  |   25 +-
 .../apache/lucene/index/SortedDocValues.java    |    2 +-
 .../lucene/index/StandardDirectoryReader.java   |    3 +-
 .../apache/lucene/search/FieldComparator.java   |    8 -
 .../apache/lucene/search/TopFieldCollector.java |    4 +-
 .../org/apache/lucene/util/ByteBlockPool.java   |   61 +-
 .../org/apache/lucene/util/OfflineSorter.java   |   63 +-
 .../java/org/apache/lucene/util/Version.java    |    7 +
 .../org/apache/lucene/util/bkd/BKDWriter.java   |    2 +-
 .../analysis/standard/TestStandardAnalyzer.java |   23 +
 .../lucene/index/TestIndexReaderClose.java      |   40 +-
 .../apache/lucene/util/TestByteBlockPool.java   |   34 +-
 .../apache/lucene/util/TestOfflineSorter.java   |   42 +
 .../search/grouping/AllGroupHeadsCollector.java |  239 +-
 .../search/grouping/AllGroupsCollector.java     |   42 +-
 .../search/grouping/BlockGroupingCollector.java |    2 +-
 .../grouping/DistinctValuesCollector.java       |   92 +-
 .../grouping/FirstPassGroupingCollector.java    |   63 +-
 .../lucene/search/grouping/GroupReducer.java    |  112 +
 .../lucene/search/grouping/GroupSelector.java   |   73 +
 .../apache/lucene/search/grouping/Grouper.java  |   56 -
 .../lucene/search/grouping/GroupingSearch.java  |   24 +-
 .../grouping/SecondPassGroupingCollector.java   |  144 +-
 .../grouping/TermGroupFacetCollector.java       |  414 ++
 .../search/grouping/TermGroupSelector.java      |  114 +
 .../search/grouping/TopGroupsCollector.java     |  116 +
 .../grouping/ValueSourceGroupSelector.java      |   86 +
 .../FunctionAllGroupHeadsCollector.java         |  159 -
 .../function/FunctionAllGroupsCollector.java    |   82 -
 .../FunctionDistinctValuesCollector.java        |   85 -
 .../FunctionFirstPassGroupingCollector.java     |   86 -
 .../grouping/function/FunctionGrouper.java      |   69 -
 .../FunctionSecondPassGroupingCollector.java    |   80 -
 .../search/grouping/function/package-info.java  |   21 -
 .../lucene/search/grouping/package-info.java    |   34 +-
 .../term/TermAllGroupHeadsCollector.java        |  767 ----
 .../grouping/term/TermAllGroupsCollector.java   |  128 -
 .../term/TermDistinctValuesCollector.java       |  162 -
 .../term/TermFirstPassGroupingCollector.java    |   96 -
 .../grouping/term/TermGroupFacetCollector.java  |  415 --
 .../search/grouping/term/TermGrouper.java       |   81 -
 .../term/TermSecondPassGroupingCollector.java   |   91 -
 .../search/grouping/term/package-info.java      |   21 -
 .../grouping/AllGroupHeadsCollectorTest.java    |   13 +-
 .../search/grouping/AllGroupsCollectorTest.java |   20 +-
 .../grouping/DistinctValuesCollectorTest.java   |   86 +-
 .../grouping/GroupFacetCollectorTest.java       |    1 -
 .../lucene/search/grouping/TestGrouping.java    |   89 +-
 .../uhighlight/MultiTermHighlighting.java       |   10 +
 .../uhighlight/TestUnifiedHighlighterMTQ.java   |    9 +-
 lucene/ivy-versions.properties                  |    4 +
 .../morfologik-ukrainian-search-3.7.5.jar.sha1  |    1 +
 .../morfologik-ukrainian-search-LICENSE-ASL.txt |  202 +
 .../morfologik-ukrainian-search-NOTICE.txt      |    6 +
 .../apache/lucene/index/memory/MemoryIndex.java |   10 +-
 .../lucene/index/memory/TestMemoryIndex.java    |   12 +
 .../lucene/document/InetAddressRange.java       |   14 +-
 .../search/TestDiversifiedTopDocsCollector.java |    2 +-
 .../search/TestInetAddressRangeQueries.java     |  100 +-
 .../spatial/prefix/HeatmapFacetCounter.java     |    3 +
 .../lucene/index/AssertingLeafReader.java       |    2 +-
 .../lucene/index/OwnCacheKeyMultiReader.java    |    3 +-
 lucene/tools/javadoc/ecj.javadocs.prefs         |   20 +-
 lucene/tools/junit4/cached-timehints.txt        |    2 +-
 solr/CHANGES.txt                                |  104 +-
 .../plugin/AnalyticsStatisticsCollector.java    |   31 +-
 .../handler/component/AnalyticsComponent.java   |   16 +-
 .../handler/dataimport/DataImportHandler.java   |   57 +-
 .../solr/handler/dataimport/DataImporter.java   |    3 +-
 .../solr/handler/dataimport/DocBuilder.java     |    2 +-
 .../handler/dataimport/ZKPropertiesWriter.java  |    2 +-
 .../apache/solr/ltr/norm/MinMaxNormalizer.java  |    4 +-
 .../solr/ltr/norm/StandardNormalizer.java       |    4 +-
 .../LTRFeatureLoggerTransformerFactory.java     |    5 -
 .../solr/ltr/search/LTRQParserPlugin.java       |    5 -
 .../solr/ltr/norm/TestMinMaxNormalizer.java     |    1 +
 .../solr/ltr/norm/TestStandardNormalizer.java   |    1 +
 .../response/SolrVelocityResourceLoader.java    |   12 +-
 .../solrj/embedded/EmbeddedSolrServer.java      |    2 +-
 .../java/org/apache/solr/cloud/CloudUtil.java   |    2 +-
 .../org/apache/solr/cloud/DistributedQueue.java |   56 +-
 .../org/apache/solr/cloud/ElectionContext.java  |    8 +-
 .../org/apache/solr/cloud/MoveReplicaCmd.java   |  193 +
 .../java/org/apache/solr/cloud/Overseer.java    |    2 +-
 .../cloud/OverseerCollectionMessageHandler.java |    1 +
 .../org/apache/solr/cloud/RecoveryStrategy.java |    4 +-
 .../apache/solr/cloud/ReplicateFromLeader.java  |    2 +-
 .../org/apache/solr/cloud/ZkController.java     |    2 +-
 .../apache/solr/cloud/overseer/NodeMutator.java |   29 +-
 .../solr/cloud/overseer/ZkWriteCommand.java     |    5 +
 .../apache/solr/cloud/rule/ImplicitSnitch.java  |    2 +-
 .../org/apache/solr/core/CoreContainer.java     |  176 +-
 .../org/apache/solr/core/CoreDescriptor.java    |   40 +-
 .../apache/solr/core/CorePropertiesLocator.java |   10 +-
 .../java/org/apache/solr/core/CoresLocator.java |    9 +
 .../org/apache/solr/core/DirectoryFactory.java  |    8 -
 .../apache/solr/core/HdfsDirectoryFactory.java  |   12 +-
 .../org/apache/solr/core/JmxMonitoredMap.java   |  478 --
 .../java/org/apache/solr/core/NodeConfig.java   |   28 +-
 .../java/org/apache/solr/core/PluginBag.java    |    6 +-
 .../java/org/apache/solr/core/SolrConfig.java   |   52 +-
 .../src/java/org/apache/solr/core/SolrCore.java |  232 +-
 .../java/org/apache/solr/core/SolrCores.java    |   70 +-
 .../java/org/apache/solr/core/SolrInfoBean.java |   95 +
 .../org/apache/solr/core/SolrInfoMBean.java     |   76 -
 .../apache/solr/core/SolrInfoMBeanWrapper.java  |   62 -
 .../apache/solr/core/SolrResourceLoader.java    |   18 +-
 .../org/apache/solr/core/SolrXmlConfig.java     |   65 +-
 .../java/org/apache/solr/core/ZkContainer.java  |    2 +-
 .../apache/solr/handler/AnalyzeEvaluator.java   |    3 -
 .../solr/handler/CdcrBufferStateManager.java    |    8 +-
 .../solr/handler/CdcrLeaderStateManager.java    |   10 +-
 .../solr/handler/CdcrProcessStateManager.java   |    8 +-
 .../solr/handler/CdcrReplicatorManager.java     |    2 +-
 .../apache/solr/handler/CdcrRequestHandler.java |    8 +-
 .../solr/handler/CdcrUpdateLogSynchronizer.java |    2 +-
 .../org/apache/solr/handler/GraphHandler.java   |    4 +-
 .../org/apache/solr/handler/IndexFetcher.java   |   12 +-
 .../solr/handler/MoreLikeThisHandler.java       |   10 -
 .../apache/solr/handler/RealTimeGetHandler.java |    6 -
 .../apache/solr/handler/ReplicationHandler.java |  117 +-
 .../apache/solr/handler/RequestHandlerBase.java |   62 +-
 .../org/apache/solr/handler/SQLHandler.java     |    4 +-
 .../apache/solr/handler/SolrConfigHandler.java  |   12 +-
 .../solr/handler/StandardRequestHandler.java    |   11 -
 .../org/apache/solr/handler/StreamHandler.java  |  131 +-
 .../handler/admin/CollectionHandlerApi.java     |    2 +
 .../solr/handler/admin/CollectionsHandler.java  |   26 +-
 .../solr/handler/admin/ConfigSetsHandler.java   |   14 +-
 .../solr/handler/admin/CoreAdminHandler.java    |    2 +-
 .../solr/handler/admin/LoggingHandler.java      |    2 +-
 .../solr/handler/admin/LukeRequestHandler.java  |   10 -
 .../handler/admin/MetricsCollectorHandler.java  |    2 +-
 .../solr/handler/admin/MetricsHandler.java      |   74 +-
 .../solr/handler/admin/PluginInfoHandler.java   |   27 +-
 .../solr/handler/admin/PrepRecoveryOp.java      |    2 +-
 .../solr/handler/admin/RequestSyncShardOp.java  |    4 +-
 .../handler/admin/ShowFileRequestHandler.java   |    2 +-
 .../handler/admin/SolrInfoMBeanHandler.java     |   32 +-
 .../solr/handler/admin/SystemInfoHandler.java   |   68 +-
 .../solr/handler/component/DebugComponent.java  |   10 +-
 .../solr/handler/component/ExpandComponent.java |   15 +-
 .../solr/handler/component/FacetComponent.java  |    8 +-
 .../handler/component/HighlightComponent.java   |    8 +-
 .../handler/component/HttpShardHandler.java     |    2 +-
 .../component/HttpShardHandlerFactory.java      |   12 +-
 .../component/MoreLikeThisComponent.java        |    8 +-
 .../solr/handler/component/QueryComponent.java  |   13 +-
 .../component/QueryElevationComponent.java      |   18 +-
 .../handler/component/RealTimeGetComponent.java |   16 +-
 .../solr/handler/component/SearchComponent.java |   31 +-
 .../solr/handler/component/SearchHandler.java   |    8 +-
 .../handler/component/SpellCheckComponent.java  |    2 +-
 .../solr/handler/component/StatsComponent.java  |    2 +-
 .../handler/component/SuggestComponent.java     |   27 +-
 .../org/apache/solr/handler/sql/SolrTable.java  |    6 +-
 .../solr/highlight/DefaultSolrHighlighter.java  |   30 +-
 .../apache/solr/highlight/GapFragmenter.java    |    2 +-
 .../solr/highlight/HighlightingPluginBase.java  |   41 +-
 .../apache/solr/highlight/HtmlFormatter.java    |    2 +-
 .../apache/solr/highlight/RegexFragmenter.java  |    2 +-
 .../solr/highlight/SimpleFragListBuilder.java   |    2 +-
 .../solr/highlight/SingleFragListBuilder.java   |    2 +-
 .../solr/highlight/SolrBoundaryScanner.java     |    6 +-
 .../org/apache/solr/highlight/SolrEncoder.java  |    4 +-
 .../apache/solr/highlight/SolrFormatter.java    |    4 +-
 .../solr/highlight/SolrFragListBuilder.java     |    4 +-
 .../apache/solr/highlight/SolrFragmenter.java   |    4 +-
 .../solr/highlight/SolrFragmentsBuilder.java    |    6 +-
 .../apache/solr/highlight/SolrHighlighter.java  |   51 +-
 .../solr/highlight/WeightedFragListBuilder.java |    2 +-
 .../apache/solr/logging/MDCLoggingContext.java  |   12 +-
 .../solr/metrics/AltBufferPoolMetricSet.java    |   47 +
 .../org/apache/solr/metrics/MetricsMap.java     |  184 +
 .../solr/metrics/OperatingSystemMetricSet.java  |   66 +-
 .../solr/metrics/SolrCoreMetricManager.java     |   31 +-
 .../org/apache/solr/metrics/SolrMetricInfo.java |   23 +-
 .../apache/solr/metrics/SolrMetricManager.java  |  144 +-
 .../apache/solr/metrics/SolrMetricReporter.java |   12 +
 .../metrics/reporters/JmxObjectNameFactory.java |   63 +-
 .../metrics/reporters/ReporterClientCache.java  |   84 +
 .../metrics/reporters/SolrGangliaReporter.java  |   48 +-
 .../metrics/reporters/SolrGraphiteReporter.java |   46 +-
 .../solr/metrics/reporters/SolrJmxReporter.java |  206 +-
 .../metrics/reporters/SolrSlf4jReporter.java    |   29 +-
 .../reporters/solr/SolrClusterReporter.java     |   28 +-
 .../metrics/reporters/solr/SolrReporter.java    |    2 +-
 .../reporters/solr/SolrShardReporter.java       |   21 +-
 .../org/apache/solr/parser/QueryParser.java     |    5 +-
 .../java/org/apache/solr/parser/QueryParser.jj  |    5 +-
 .../apache/solr/parser/SolrQueryParserBase.java |   23 +-
 .../org/apache/solr/request/SimpleFacets.java   |    7 +-
 .../apache/solr/request/SolrRequestHandler.java |    4 +-
 .../analysis/ManagedSynonymFilterFactory.java   |    7 +-
 .../org/apache/solr/schema/CurrencyField.java   |    7 +-
 .../org/apache/solr/schema/DatePointField.java  |    9 +-
 .../apache/solr/schema/DoublePointField.java    |    7 +-
 .../java/org/apache/solr/schema/FieldType.java  |    6 +-
 .../org/apache/solr/schema/FloatPointField.java |    7 +-
 .../org/apache/solr/schema/IndexSchema.java     |   13 +-
 .../org/apache/solr/schema/IntPointField.java   |    7 +-
 .../org/apache/solr/schema/LongPointField.java  |    7 +-
 .../java/org/apache/solr/schema/PointField.java |   53 +-
 .../org/apache/solr/schema/SchemaField.java     |   32 +-
 .../org/apache/solr/schema/SchemaManager.java   |    2 +-
 .../java/org/apache/solr/schema/TextField.java  |   13 +-
 .../apache/solr/schema/ZkIndexSchemaReader.java |    2 +-
 .../solr/search/ExtendedDismaxQParser.java      |    5 +-
 .../org/apache/solr/search/FastLRUCache.java    |  126 +-
 .../java/org/apache/solr/search/Grouping.java   |   57 +-
 .../apache/solr/search/JoinQParserPlugin.java   |    4 +-
 .../java/org/apache/solr/search/LFUCache.java   |  147 +-
 .../java/org/apache/solr/search/LRUCache.java   |   82 +-
 .../org/apache/solr/search/QParserPlugin.java   |   22 +-
 .../java/org/apache/solr/search/SolrCache.java  |    5 +-
 .../org/apache/solr/search/SolrCacheBase.java   |    7 +-
 .../apache/solr/search/SolrFieldCacheBean.java  |   77 +
 .../apache/solr/search/SolrFieldCacheMBean.java |   78 -
 .../apache/solr/search/SolrIndexSearcher.java   |   93 +-
 .../apache/solr/search/ValueSourceParser.java   |   23 +-
 .../solr/search/facet/FacetFieldMerger.java     |    2 +
 .../facet/FacetFieldProcessorByArray.java       |   24 +-
 .../apache/solr/search/facet/FacetModule.java   |    6 -
 .../org/apache/solr/search/facet/SlotAcc.java   |  181 +-
 .../org/apache/solr/search/facet/StddevAgg.java |   66 +
 .../apache/solr/search/facet/VarianceAgg.java   |   65 +
 .../apache/solr/search/grouping/Command.java    |    2 +-
 .../solr/search/grouping/CommandHandler.java    |   10 +-
 .../distributed/command/QueryCommand.java       |    2 +-
 .../command/SearchGroupsFieldCommand.java       |   28 +-
 .../command/TopGroupsFieldCommand.java          |   52 +-
 .../SearchGroupShardResponseProcessor.java      |    6 +-
 .../TopGroupsShardResponseProcessor.java        |   12 +-
 .../SearchGroupsResultTransformer.java          |    2 +-
 .../ShardResultTransformer.java                 |    4 +-
 .../TopGroupsResultTransformer.java             |   14 +-
 .../search/join/ScoreJoinQParserPlugin.java     |    4 +-
 .../apache/solr/servlet/SolrDispatchFilter.java |   24 +-
 .../solr/spelling/SpellCheckCollator.java       |   15 +
 .../solr/spelling/SpellingQueryConverter.java   |    2 +-
 .../apache/solr/store/blockcache/Metrics.java   |  124 +-
 .../solr/store/hdfs/HdfsLocalityReporter.java   |  141 +-
 .../solr/uninverting/UninvertingReader.java     |   62 +-
 .../solr/update/DefaultSolrCoreState.java       |    2 +-
 .../solr/update/DirectUpdateHandler2.java       |  122 +-
 .../org/apache/solr/update/HdfsUpdateLog.java   |    4 +-
 .../java/org/apache/solr/update/PeerSync.java   |   18 +-
 .../org/apache/solr/update/SolrIndexWriter.java |   28 +-
 .../org/apache/solr/update/UpdateHandler.java   |   18 +-
 .../java/org/apache/solr/update/UpdateLog.java  |   16 +-
 .../apache/solr/update/UpdateShardHandler.java  |   58 +-
 .../solr/update/UpdateShardHandlerConfig.java   |   12 +-
 .../processor/DistributedUpdateProcessor.java   |   27 +-
 .../DocExpirationUpdateProcessorFactory.java    |    2 +-
 .../processor/TolerantUpdateProcessor.java      |    2 +-
 .../src/java/org/apache/solr/util/JmxUtil.java  |    3 -
 .../org/apache/solr/util/SolrLogLayout.java     |    2 +-
 ...entedPoolingHttpClientConnectionManager.java |    8 +-
 .../org/apache/solr/util/stats/MetricUtils.java |  594 ++-
 .../src/resources/apispec/cluster.aliases.json  |   12 +
 .../solr/collection1/conf/schema-point.xml      |   39 +-
 .../solr/collection1/conf/schema-rest.xml       |    2 +-
 .../solr/collection1/conf/schema12.xml          |   11 +-
 .../conf/solrconfig-collapseqparser.xml         |  111 +-
 .../src/test-files/solr/solr-hiddensysprops.xml |   31 +
 .../src/test-files/solr/solr-jmxreporter.xml    |   43 +
 .../src/test-files/solr/solr-solrreporter.xml   |    4 +
 .../org/apache/solr/BasicFunctionalityTest.java |   17 +-
 .../test/org/apache/solr/CursorPagingTest.java  |   22 +-
 .../test/org/apache/solr/SolrInfoBeanTest.java  |  124 +
 .../test/org/apache/solr/SolrInfoMBeanTest.java |  118 -
 .../org/apache/solr/TestGroupingSearch.java     |    4 +-
 .../apache/solr/cloud/AliasIntegrationTest.java |    4 +
 .../solr/cloud/BasicDistributedZkTest.java      |   24 +-
 .../test/org/apache/solr/cloud/BasicZkTest.java |   15 +-
 .../apache/solr/cloud/ClusterStateMockUtil.java |  233 +
 .../CollectionsAPIAsyncDistributedZkTest.java   |   16 +-
 .../cloud/CollectionsAPIDistributedZkTest.java  |   36 +-
 .../apache/solr/cloud/DistributedQueueTest.java |   50 +-
 .../org/apache/solr/cloud/MoveReplicaTest.java  |  125 +
 .../org/apache/solr/cloud/NodeMutatorTest.java  |   95 +
 .../solr/cloud/ReplicationFactorTest.java       |    9 -
 .../SharedFSAutoReplicaFailoverUtilsTest.java   |  263 +-
 .../cloud/TestRandomRequestDistribution.java    |   19 +-
 .../HdfsCollectionsAPIDistributedZkTest.java    |  114 +
 .../solr/core/ExitableDirectoryReaderTest.java  |   26 +-
 .../solr/core/HdfsDirectoryFactoryTest.java     |   29 +-
 .../test/org/apache/solr/core/MockInfoBean.java |   71 +
 .../org/apache/solr/core/MockInfoMBean.java     |   69 -
 .../core/MockQuerySenderListenerReqHandler.java |   15 +-
 .../apache/solr/core/RequestHandlersTest.java   |   21 +-
 .../test/org/apache/solr/core/SolrCoreTest.java |    4 +-
 .../org/apache/solr/core/TestCodecSupport.java  |   10 +-
 .../org/apache/solr/core/TestCoreContainer.java |   30 +-
 .../org/apache/solr/core/TestCoreDiscovery.java |    2 +-
 .../apache/solr/core/TestJmxIntegration.java    |   94 +-
 .../apache/solr/core/TestJmxMonitoredMap.java   |  217 -
 .../org/apache/solr/core/TestLazyCores.java     |   19 +-
 .../apache/solr/core/TestSolrDynamicMBean.java  |   87 -
 .../solr/handler/admin/MBeansHandlerTest.java   |   14 +-
 .../solr/handler/admin/MetricsHandlerTest.java  |   79 +-
 .../solr/handler/admin/StatsReloadRaceTest.java |   40 +-
 .../handler/admin/SystemInfoHandlerTest.java    |    8 +-
 .../component/ResourceSharingTestComponent.java |    5 -
 .../apache/solr/highlight/HighlighterTest.java  |   20 +
 .../highlight/TestPostingsSolrHighlighter.java  |   13 +
 .../highlight/TestUnifiedSolrHighlighter.java   |   13 +
 .../org/apache/solr/metrics/JvmMetricsTest.java |  101 +-
 .../solr/metrics/SolrCoreMetricManagerTest.java |    8 +-
 .../solr/metrics/SolrMetricManagerTest.java     |   44 +-
 .../solr/metrics/SolrMetricReporterTest.java    |    1 +
 .../solr/metrics/SolrMetricTestUtils.java       |   12 +-
 .../metrics/SolrMetricsIntegrationTest.java     |   17 +-
 .../reporters/SolrGangliaReporterTest.java      |    2 +-
 .../reporters/SolrGraphiteReporterTest.java     |    4 +-
 .../metrics/reporters/SolrJmxReporterTest.java  |   67 +-
 .../reporters/SolrSlf4jReporterTest.java        |    2 +-
 .../reporters/solr/SolrCloudReportersTest.java  |    9 +-
 .../reporters/solr/SolrShardReporterTest.java   |    2 +-
 .../solr/rest/schema/TestFieldResource.java     |    3 +-
 .../TestManagedSynonymFilterFactory.java        |   40 +
 .../org/apache/solr/schema/DocValuesTest.java   |   72 +
 .../org/apache/solr/schema/TestPointFields.java | 1104 ++++-
 .../solr/schema/TestUseDocValuesAsStored.java   |   34 +-
 .../apache/solr/search/MockSearchComponent.java |    6 -
 .../apache/solr/search/QueryEqualityTest.java   |    3 +-
 .../solr/search/TestExtendedDismaxParser.java   |   18 +
 .../apache/solr/search/TestFastLRUCache.java    |   32 +-
 .../apache/solr/search/TestIndexSearcher.java   |    9 +-
 .../org/apache/solr/search/TestLFUCache.java    |   21 +-
 .../org/apache/solr/search/TestLRUCache.java    |   16 +-
 .../solr/search/TestReRankQParserPlugin.java    |   15 +-
 .../org/apache/solr/search/TestRecovery.java    |  348 +-
 .../apache/solr/search/TestSolr4Spatial2.java   |   11 +-
 .../solr/search/TestSolrFieldCacheBean.java     |   97 +
 .../solr/search/TestSolrFieldCacheMBean.java    |   83 -
 .../apache/solr/search/TestSolrQueryParser.java |   56 +-
 .../search/facet/TestJsonFacetRefinement.java   |   17 +-
 .../solr/search/facet/TestJsonFacets.java       |   19 +-
 .../apache/solr/search/join/BJQParserTest.java  |   28 +-
 .../solr/search/join/TestScoreJoinQPScore.java  |   52 +-
 .../SpellCheckCollatorWithCollapseTest.java     |   67 +
 .../spelling/SpellingQueryConverterTest.java    |   10 +
 .../solr/store/blockcache/BufferStoreTest.java  |   15 +-
 .../org/apache/solr/update/VersionInfoTest.java |    2 +-
 .../ClassificationUpdateProcessorTest.java      |    4 +-
 .../org/apache/solr/util/MockCoreContainer.java |    2 +-
 .../apache/solr/util/stats/MetricUtilsTest.java |   11 +-
 .../solr/client/solrj/impl/CloudSolrClient.java |   85 +-
 .../client/solrj/impl/ClusterStateProvider.java |   59 +
 .../solrj/impl/HttpClusterStateProvider.java    |  261 ++
 .../client/solrj/impl/LBHttpSolrClient.java     |   41 +-
 .../impl/ZkClientClusterStateProvider.java      |   20 +-
 .../solrj/io/eval/ConversionEvaluator.java      |  166 +
 .../solrj/io/eval/EvaluatorException.java       |   30 +
 .../client/solrj/io/eval/TemporalEvaluator.java |  136 +
 .../solrj/io/eval/TemporalEvaluatorDay.java     |   48 +
 .../io/eval/TemporalEvaluatorDayOfQuarter.java  |   48 +
 .../io/eval/TemporalEvaluatorDayOfYear.java     |   48 +
 .../solrj/io/eval/TemporalEvaluatorEpoch.java   |   53 +
 .../solrj/io/eval/TemporalEvaluatorHour.java    |   48 +
 .../solrj/io/eval/TemporalEvaluatorMinute.java  |   48 +
 .../solrj/io/eval/TemporalEvaluatorMonth.java   |   48 +
 .../solrj/io/eval/TemporalEvaluatorQuarter.java |   48 +
 .../solrj/io/eval/TemporalEvaluatorSecond.java  |   48 +
 .../solrj/io/eval/TemporalEvaluatorWeek.java    |   48 +
 .../solrj/io/eval/TemporalEvaluatorYear.java    |   48 +
 .../solrj/io/stream/CalculatorStream.java       |  109 +
 .../client/solrj/io/stream/CloudSolrStream.java |   54 +-
 .../solr/client/solrj/io/stream/EchoStream.java |  119 +
 .../solr/client/solrj/io/stream/EvalStream.java |  143 +
 .../client/solrj/io/stream/FetchStream.java     |   20 +-
 .../client/solrj/io/stream/HavingStream.java    |    3 +
 .../client/solrj/io/stream/ParallelStream.java  |   36 +-
 .../client/solrj/io/stream/SelectStream.java    |   24 +-
 .../client/solrj/io/stream/ShuffleStream.java   |  103 +
 .../client/solrj/io/stream/StreamContext.java   |    9 +
 .../solrj/request/CollectionAdminRequest.java   |   52 +
 .../client/solrj/request/UpdateRequest.java     |    3 +-
 .../solrj/response/CollectionAdminResponse.java |   11 +
 .../apache/solr/common/cloud/ClusterState.java  |    5 +-
 .../solr/common/params/CollectionParams.java    |    1 +
 .../solrj/MergeIndexesExampleTestBase.java      |    2 +-
 .../solrj/impl/CloudSolrClientCacheTest.java    |   21 +-
 .../client/solrj/impl/CloudSolrClientTest.java  |  143 +-
 .../client/solrj/io/stream/JDBCStreamTest.java  |  308 +-
 .../io/stream/SelectWithEvaluatorsTest.java     |   37 +-
 .../solrj/io/stream/StreamExpressionTest.java   | 4075 ++++++++++--------
 .../client/solrj/io/stream/StreamingTest.java   | 2505 ++++++-----
 .../stream/eval/ConversionEvaluatorsTest.java   |  129 +
 .../io/stream/eval/TemporalEvaluatorsTest.java  |  305 ++
 .../java/org/apache/solr/SolrTestCaseJ4.java    |   14 +
 .../cloud/AbstractFullDistribZkTestBase.java    |    9 -
 .../apache/solr/cloud/MiniSolrCloudCluster.java |    5 +
 .../apache/solr/util/ReadOnlyCoresLocator.java  |    6 +
 .../java/org/apache/solr/util/TestHarness.java  |   22 +-
 solr/webapp/web/css/angular/plugins.css         |   10 +-
 .../web/js/angular/controllers/documents.js     |    2 -
 solr/webapp/web/js/scripts/documents.js         |    8 -
 solr/webapp/web/partials/documents.html         |    7 -
 solr/webapp/web/partials/plugins.html           |    2 +-
 solr/webapp/web/tpl/documents.html              |    7 -
 430 files changed, 15725 insertions(+), 10763 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/dev-tools/eclipse/dot.settings/org.eclipse.jdt.core.prefs
----------------------------------------------------------------------
diff --git a/dev-tools/eclipse/dot.settings/org.eclipse.jdt.core.prefs b/dev-tools/eclipse/dot.settings/org.eclipse.jdt.core.prefs
index 0f0b112..070fb23 100644
--- a/dev-tools/eclipse/dot.settings/org.eclipse.jdt.core.prefs
+++ b/dev-tools/eclipse/dot.settings/org.eclipse.jdt.core.prefs
@@ -3,14 +3,22 @@ eclipse.preferences.version=1
 org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
 org.eclipse.jdt.core.compiler.compliance=1.8
 org.eclipse.jdt.core.compiler.doc.comment.support=enabled
+org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=error
 org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
 org.eclipse.jdt.core.compiler.problem.comparingIdentical=error
+org.eclipse.jdt.core.compiler.problem.discouragedReference=error
 org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.problem.finalParameterBound=error
+org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=error
+org.eclipse.jdt.core.compiler.problem.forbiddenReference=error
+org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=error
+org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=error
 org.eclipse.jdt.core.compiler.problem.invalidJavadoc=error
 org.eclipse.jdt.core.compiler.problem.invalidJavadocTags=enabled
 org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsDeprecatedRef=disabled
 org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsNotVisibleRef=disabled
 org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsVisibility=private
+org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=error
 org.eclipse.jdt.core.compiler.problem.missingJavadocComments=ignore
 org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsOverriding=disabled
 org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsVisibility=public
@@ -20,6 +28,8 @@ org.eclipse.jdt.core.compiler.problem.missingJavadocTagsMethodTypeParameters=dis
 org.eclipse.jdt.core.compiler.problem.missingJavadocTagsOverriding=disabled
 org.eclipse.jdt.core.compiler.problem.missingJavadocTagsVisibility=public
 org.eclipse.jdt.core.compiler.problem.noEffectAssignment=error
+org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=error
+org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=error
 org.eclipse.jdt.core.compiler.problem.unusedImport=error
 org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=error
 org.eclipse.jdt.core.compiler.annotation.nullanalysis=disabled

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/dev-tools/idea/.idea/libraries/Solr_DIH_core_library.xml
----------------------------------------------------------------------
diff --git a/dev-tools/idea/.idea/libraries/Solr_DIH_core_library.xml b/dev-tools/idea/.idea/libraries/Solr_DIH_core_library.xml
new file mode 100644
index 0000000..d363b92
--- /dev/null
+++ b/dev-tools/idea/.idea/libraries/Solr_DIH_core_library.xml
@@ -0,0 +1,10 @@
+<component name="libraryTable">
+  <library name="Solr DIH core library">
+    <CLASSES>
+      <root url="file://$PROJECT_DIR$/solr/contrib/dataimporthandler/lib" />
+    </CLASSES>
+    <JAVADOC />
+    <SOURCES />
+    <jarDirectory url="file://$PROJECT_DIR$/solr/contrib/dataimporthandler/lib" recursive="false" />
+  </library>
+</component>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/dev-tools/idea/solr/contrib/dataimporthandler/dataimporthandler.iml
----------------------------------------------------------------------
diff --git a/dev-tools/idea/solr/contrib/dataimporthandler/dataimporthandler.iml b/dev-tools/idea/solr/contrib/dataimporthandler/dataimporthandler.iml
index 6268247..8240ff2 100644
--- a/dev-tools/idea/solr/contrib/dataimporthandler/dataimporthandler.iml
+++ b/dev-tools/idea/solr/contrib/dataimporthandler/dataimporthandler.iml
@@ -16,9 +16,10 @@
     <orderEntry type="library" scope="TEST" name="HSQLDB" level="project" />
     <orderEntry type="library" scope="TEST" name="Derby" level="project" />
     <orderEntry type="library" scope="TEST" name="Solr DIH test library" level="project" />
-    <orderEntry type="library" scope="TEST" name="Solr example library" level="project" />
+    <orderEntry type="library" name="Solr example library" level="project" />
     <orderEntry type="library" name="Solr core library" level="project" />
     <orderEntry type="library" name="Solrj library" level="project" />
+    <orderEntry type="library" name="Solr DIH core library" level="project" />
     <orderEntry type="module" scope="TEST" module-name="lucene-test-framework" />
     <orderEntry type="module" scope="TEST" module-name="solr-test-framework" />
     <orderEntry type="module" module-name="solr-core" />

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 1f3f30c..3cee960 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -47,6 +47,9 @@ API Changes
 * LUCENE-7734: FieldType's copy constructor was widened to accept any IndexableFieldType.
   (David Smiley)
 
+* LUCENE-7701: Grouping collectors have been refactored, such that groups are
+  now defined by a GroupSelector implementation. (Alan Woodward)
+
 Bug Fixes
 
 * LUCENE-7626: IndexWriter will no longer accept broken token offsets
@@ -88,6 +91,25 @@ Other
 
 ======================= Lucene 6.6.0 =======================
 
+Bug Fixes
+
+* LUCENE-7777: ByteBlockPool.readBytes sometimes throws
+  ArrayIndexOutOfBoundsException when byte blocks larger than 32 KB
+  were added (Mike McCandless)
+
+Improvements
+
+* LUCENE-7782: OfflineSorter now passes the total number of items it
+  will write to getWriter (Mike McCandless)
+
+* LUCENE-7785: Move dictionary for Ukrainian analyzer to external dependency.
+  (Andriy Rysin via Steve Rowe, Dawid Weiss)
+
+Optimizations
+
+* LUCENE-7787: spatial-extras HeatmapFacetCounter will now short-circuit its
+  work when Bits.MatchNoBits is passed. (David Smiley)
+
 Other
 
 * LUCENE-7754: Inner classes should be static whenever possible.
@@ -112,6 +134,9 @@ Bug Fixes
 * LUCENE-7749: Made LRUQueryCache delegate the scoreSupplier method.
   (Martin Amirault via Adrien Grand)
 
+* LUCENE-7769: The UnifiedHighlighter wasn't highlighting portions of the query
+  wrapped in BoostQuery or SpanBoostQuery. (David Smiley, Dmitry Malinin)
+
 Other
 
 * LUCENE-7763: Remove outdated comment in IndexWriterConfig.setIndexSort javadocs.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/MIGRATE.txt
----------------------------------------------------------------------
diff --git a/lucene/MIGRATE.txt b/lucene/MIGRATE.txt
index 1ccf56f..c7936a4 100644
--- a/lucene/MIGRATE.txt
+++ b/lucene/MIGRATE.txt
@@ -61,3 +61,16 @@ do not take deleted docs and doc values updates into account.
 Index-time boosts are not supported anymore. As a replacement, index-time
 scoring factors should be indexed in a doc value field and combined with the
 score at query time using FunctionScoreQuery for instance.
+
+## Grouping collector refactoring (LUCENE-7701)
+
+Groups are now defined by GroupSelector classes, making it easier to define new
+types of groups.  Rather than having term or function specific collection
+classes, FirstPassGroupingCollector, AllGroupsCollector and
+AllGroupHeadsCollector are now concrete classes taking a GroupSelector.
+
+SecondPassGroupingCollector is no longer specifically aimed at
+collecting TopDocs for each group, but instead takes a GroupReducer that will
+perform any type of reduction on the top groups collected on a first-pass.  To
+reproduce the old behaviour of SecondPassGroupingCollector, you should instead
+use TopGroupsCollector.
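+
+To make the migration concrete, here is a minimal sketch of the two-pass flow
+under the new API. It assumes an open IndexSearcher, a Query and a group Sort
+are in scope; "category" is an illustrative field name that must be indexed
+as SortedDocValues, and the constructor signatures reflect this refactoring
+as committed (later releases may differ):
+
+    import java.io.IOException;
+    import java.util.Collection;
+    import org.apache.lucene.search.IndexSearcher;
+    import org.apache.lucene.search.Query;
+    import org.apache.lucene.search.Sort;
+    import org.apache.lucene.search.grouping.FirstPassGroupingCollector;
+    import org.apache.lucene.search.grouping.SearchGroup;
+    import org.apache.lucene.search.grouping.TermGroupSelector;
+    import org.apache.lucene.search.grouping.TopGroups;
+    import org.apache.lucene.search.grouping.TopGroupsCollector;
+    import org.apache.lucene.util.BytesRef;
+
+    public class GroupingMigrationSketch {
+      static TopGroups<BytesRef> groupByCategory(IndexSearcher searcher, Query query,
+                                                 Sort groupSort) throws IOException {
+        // Groups are now defined by a GroupSelector, not a term-specific collector.
+        TermGroupSelector selector = new TermGroupSelector("category");
+        // First pass: find the top 10 groups.
+        FirstPassGroupingCollector<BytesRef> firstPass =
+            new FirstPassGroupingCollector<>(selector, groupSort, 10);
+        searcher.search(query, firstPass);
+        Collection<SearchGroup<BytesRef>> topGroups = firstPass.getTopGroups(0, true);
+        // Second pass: TopGroupsCollector reproduces the old
+        // SecondPassGroupingCollector behaviour (top 5 docs per group, with scores).
+        TopGroupsCollector<BytesRef> secondPass = new TopGroupsCollector<>(
+            selector, topGroups, groupSort, Sort.RELEVANCE, 5, true, true, true);
+        searcher.search(query, secondPass);
+        return secondPass.getTopGroups(0);
+      }
+    }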

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/analysis/common/src/java/org/apache/lucene/analysis/commongrams/CommonGramsFilter.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/commongrams/CommonGramsFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/commongrams/CommonGramsFilter.java
index 75e991f..c01e263 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/commongrams/CommonGramsFilter.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/commongrams/CommonGramsFilter.java
@@ -106,7 +106,7 @@ public final class CommonGramsFilter extends TokenFilter {
       saveTermBuffer();
       return true;
     } else if (!input.incrementToken()) {
-        return false;
+      return false;
     }
     
     /* We build n-grams before and after stopwords. 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailAnalyzer.java
index fe71b7e..282c2e7 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailAnalyzer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailAnalyzer.java
@@ -66,10 +66,11 @@ public final class UAX29URLEmailAnalyzer extends StopwordAnalyzerBase {
   }
 
   /**
-   * Set maximum allowed token length.  If a token is seen
-   * that exceeds this length then it is discarded.  This
-   * setting only takes effect the next time tokenStream or
-   * tokenStream is called.
+   * Set the max allowed token length.  Tokens larger than this will be chopped
+   * up at this token length and emitted as multiple tokens.  If you need to
+   * skip such large tokens, you could increase this max length, and then
+   * use {@code LengthFilter} to remove long tokens.  The default is
+   * {@link UAX29URLEmailAnalyzer#DEFAULT_MAX_TOKEN_LENGTH}.
    */
   public void setMaxTokenLength(int length) {
     maxTokenLength = length;
@@ -92,6 +93,8 @@ public final class UAX29URLEmailAnalyzer extends StopwordAnalyzerBase {
     return new TokenStreamComponents(src, tok) {
       @Override
       protected void setReader(final Reader reader) {
+        // So that if maxTokenLength was changed, the change takes
+        // effect next time tokenStream is called:
         src.setMaxTokenLength(UAX29URLEmailAnalyzer.this.maxTokenLength);
         super.setReader(reader);
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.java
index d2b02e4..842ae51 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.java
@@ -72,19 +72,34 @@ public final class UAX29URLEmailTokenizer extends Tokenizer {
     "<URL>",
     "<EMAIL>",
   };
+
+  /** Absolute maximum sized token */
+  public static final int MAX_TOKEN_LENGTH_LIMIT = 1024 * 1024;
   
   private int skippedPositions;
 
   private int maxTokenLength = StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH;
 
-  /** Set the max allowed token length.  Any token longer
-   *  than this is skipped. */
+  /**
+   * Set the max allowed token length.  Tokens larger than this will be chopped
+   * up at this token length and emitted as multiple tokens.  If you need to
+   * skip such large tokens, you could increase this max length, and then
+   * use {@code LengthFilter} to remove long tokens.  The default is
+   * {@link UAX29URLEmailAnalyzer#DEFAULT_MAX_TOKEN_LENGTH}.
+   * 
+   * @throws IllegalArgumentException if the given length is outside of the
+   *  range [1, {@value #MAX_TOKEN_LENGTH_LIMIT}].
+   */ 
   public void setMaxTokenLength(int length) {
     if (length < 1) {
       throw new IllegalArgumentException("maxTokenLength must be greater than zero");
+    } else if (length > MAX_TOKEN_LENGTH_LIMIT) {
+      throw new IllegalArgumentException("maxTokenLength may not exceed " + MAX_TOKEN_LENGTH_LIMIT);
+    }
+    if (length != maxTokenLength) {
+      this.maxTokenLength = length;
+      scanner.setBufferSize(length);
     }
-    this.maxTokenLength = length;
-    scanner.setBufferSize(Math.min(length, 1024 * 1024)); // limit buffer size to 1M chars
   }
 
   /** @see #setMaxTokenLength */
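
The new javadoc changes the contract: over-long tokens are now chopped into
pieces instead of being silently skipped. Below is a minimal sketch of the
workaround the javadoc suggests (raise the limit, then remove long tokens with
LengthFilter); the 255-char cutoff is an arbitrary illustration:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.miscellaneous.LengthFilter;
    import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer;

    public class SkipLongTokensAnalyzer extends Analyzer {
      @Override
      protected TokenStreamComponents createComponents(String fieldName) {
        UAX29URLEmailTokenizer tok = new UAX29URLEmailTokenizer();
        // Raise the limit so long URLs are kept whole rather than chopped up...
        tok.setMaxTokenLength(UAX29URLEmailTokenizer.MAX_TOKEN_LENGTH_LIMIT);
        // ...then drop anything longer than 255 chars, restoring the old
        // "skip over-long tokens" behaviour.
        TokenStream filtered = new LengthFilter(tok, 1, 255);
        return new TokenStreamComponents(tok, filtered);
      }
    }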

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailAnalyzer.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailAnalyzer.java
index 14a5165..b932178 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailAnalyzer.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailAnalyzer.java
@@ -357,4 +357,27 @@ public class TestUAX29URLEmailAnalyzer extends BaseTokenStreamTestCase {
   public void testRandomStrings() throws Exception {
     checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
   }
+
+  public void testMaxTokenLengthDefault() throws Exception {
+
+    StringBuilder bToken = new StringBuilder();
+    // exact max length:
+    for(int i=0;i<StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH;i++) {
+      bToken.append('b');
+    }
+
+    String bString = bToken.toString();
+    // first bString is exact max default length; next one is 1 too long
+    String input = "x " + bString + " " + bString + "b";
+    assertAnalyzesTo(a, input, new String[] {"x", bString, bString, "b"});
+    a.close();
+  }
+
+  public void testMaxTokenLengthNonDefault() throws Exception {
+    UAX29URLEmailAnalyzer a = new UAX29URLEmailAnalyzer();
+    a.setMaxTokenLength(5);
+    assertAnalyzesTo(a, "ab cd toolong xy z", new String[]{"ab", "cd", "toolo", "ng", "xy", "z"});
+    a.close();
+  }
+  
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailTokenizer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailTokenizer.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailTokenizer.java
index eaa5a44..cfe31c9 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailTokenizer.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/standard/TestUAX29URLEmailTokenizer.java
@@ -105,7 +105,7 @@ public class TestUAX29URLEmailTokenizer extends BaseTokenStreamTestCase {
       @Override
       protected TokenStreamComponents createComponents(String fieldName) {
         UAX29URLEmailTokenizer tokenizer = new UAX29URLEmailTokenizer(newAttributeFactory());
-        tokenizer.setMaxTokenLength(Integer.MAX_VALUE);  // Tokenize arbitrary length URLs
+        tokenizer.setMaxTokenLength(UAX29URLEmailTokenizer.MAX_TOKEN_LENGTH_LIMIT);  // Tokenize arbitrary length URLs
         TokenFilter filter = new URLFilter(tokenizer);
         return new TokenStreamComponents(tokenizer, filter);
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/analysis/morfologik/ivy.xml
----------------------------------------------------------------------
diff --git a/lucene/analysis/morfologik/ivy.xml b/lucene/analysis/morfologik/ivy.xml
index 1312aab..f0cc234 100644
--- a/lucene/analysis/morfologik/ivy.xml
+++ b/lucene/analysis/morfologik/ivy.xml
@@ -25,6 +25,7 @@
     <dependency org="org.carrot2" name="morfologik-polish" rev="${/org.carrot2/morfologik-polish}" conf="compile"/>
     <dependency org="org.carrot2" name="morfologik-fsa" rev="${/org.carrot2/morfologik-fsa}" conf="compile"/>
     <dependency org="org.carrot2" name="morfologik-stemming" rev="${/org.carrot2/morfologik-stemming}" conf="compile"/>
+    <dependency org="ua.net.nlp" name="morfologik-ukrainian-search" rev="${/ua.net.nlp/morfologik-ukrainian-search}" conf="compile"/>
     <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/> 
   </dependencies>
 </ivy-module>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/analysis/morfologik/src/java/org/apache/lucene/analysis/uk/UkrainianMorfologikAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/morfologik/src/java/org/apache/lucene/analysis/uk/UkrainianMorfologikAnalyzer.java b/lucene/analysis/morfologik/src/java/org/apache/lucene/analysis/uk/UkrainianMorfologikAnalyzer.java
index f9d3b2b..6955fe3 100644
--- a/lucene/analysis/morfologik/src/java/org/apache/lucene/analysis/uk/UkrainianMorfologikAnalyzer.java
+++ b/lucene/analysis/morfologik/src/java/org/apache/lucene/analysis/uk/UkrainianMorfologikAnalyzer.java
@@ -107,11 +107,17 @@ public final class UkrainianMorfologikAnalyzer extends StopwordAnalyzerBase {
   @Override
   protected Reader initReader(String fieldName, Reader reader) {
     NormalizeCharMap.Builder builder = new NormalizeCharMap.Builder();
+    // different apostrophes
     builder.add("\u2019", "'");
+    builder.add("\u2018", "'");
     builder.add("\u02BC", "'");
+    builder.add("`", "'");
+    builder.add("�", "'");
+    // ignored characters
     builder.add("\u0301", "");
-    NormalizeCharMap normMap = builder.build();
+    builder.add("\u00AD", "");
 
+    NormalizeCharMap normMap = builder.build();
     reader = new MappingCharFilter(normMap, reader);
     return reader;
   }
@@ -145,7 +151,7 @@ public final class UkrainianMorfologikAnalyzer extends StopwordAnalyzerBase {
 
   private static Dictionary getDictionary() {
     try {
-      return Dictionary.read(UkrainianMorfologikAnalyzer.class.getResource("ukrainian.dict"));
+      return Dictionary.read(UkrainianMorfologikAnalyzer.class.getClassLoader().getResource("ua/net/nlp/ukrainian.dict"));
     } catch (IOException e) {
       throw new RuntimeException(e);
     }
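
For reference, the added mappings in isolation: MappingCharFilter rewrites the
character stream before tokenization, so every apostrophe variant reaches the
tokenizer as a plain '. A small self-contained sketch (the input word is
illustrative; the acute-accent mapping on the fifth line is an assumption,
repaired from a mis-encoded character above):

    import java.io.IOException;
    import java.io.Reader;
    import java.io.StringReader;
    import org.apache.lucene.analysis.charfilter.MappingCharFilter;
    import org.apache.lucene.analysis.charfilter.NormalizeCharMap;

    public class ApostropheNormalizationDemo {
      public static void main(String[] args) throws IOException {
        NormalizeCharMap.Builder builder = new NormalizeCharMap.Builder();
        builder.add("\u2019", "'");  // right single quotation mark
        builder.add("\u2018", "'");  // left single quotation mark
        builder.add("\u02BC", "'");  // modifier letter apostrophe
        builder.add("`", "'");       // grave accent
        builder.add("\u00B4", "'");  // acute accent (assumed mapping)
        builder.add("\u0301", "");   // combining acute accent: dropped
        builder.add("\u00AD", "");   // soft hyphen: dropped
        Reader in = new MappingCharFilter(builder.build(), new StringReader("п\u02BCєса"));
        char[] buf = new char[16];
        int len = in.read(buf);
        System.out.println(new String(buf, 0, len));  // prints: п'єса
      }
    }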

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/analysis/morfologik/src/resources/org/apache/lucene/analysis/uk/ukrainian.dict
----------------------------------------------------------------------
diff --git a/lucene/analysis/morfologik/src/resources/org/apache/lucene/analysis/uk/ukrainian.dict b/lucene/analysis/morfologik/src/resources/org/apache/lucene/analysis/uk/ukrainian.dict
deleted file mode 100644
index 2468970..0000000
Binary files a/lucene/analysis/morfologik/src/resources/org/apache/lucene/analysis/uk/ukrainian.dict and /dev/null differ

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/analysis/morfologik/src/resources/org/apache/lucene/analysis/uk/ukrainian.info
----------------------------------------------------------------------
diff --git a/lucene/analysis/morfologik/src/resources/org/apache/lucene/analysis/uk/ukrainian.info b/lucene/analysis/morfologik/src/resources/org/apache/lucene/analysis/uk/ukrainian.info
deleted file mode 100644
index 2c69f4b..0000000
--- a/lucene/analysis/morfologik/src/resources/org/apache/lucene/analysis/uk/ukrainian.info
+++ /dev/null
@@ -1,10 +0,0 @@
-#
-# Dictionary properties.
-#
-
-fsa.dict.separator=+
-fsa.dict.encoding=cp1251
-
-fsa.dict.encoder=SUFFIX
-
-fsa.dict.speller.ignore-diacritics=false

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/analysis/morfologik/src/test/org/apache/lucene/analysis/uk/TestUkrainianAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/analysis/morfologik/src/test/org/apache/lucene/analysis/uk/TestUkrainianAnalyzer.java b/lucene/analysis/morfologik/src/test/org/apache/lucene/analysis/uk/TestUkrainianAnalyzer.java
index a38fc63..15b247d 100644
--- a/lucene/analysis/morfologik/src/test/org/apache/lucene/analysis/uk/TestUkrainianAnalyzer.java
+++ b/lucene/analysis/morfologik/src/test/org/apache/lucene/analysis/uk/TestUkrainianAnalyzer.java
@@ -44,22 +44,22 @@ public class TestUkrainianAnalyzer extends BaseTokenStreamTestCase {
 
   public void testSpecialCharsTokenStream() throws Exception {
     Analyzer a = new UkrainianMorfologikAnalyzer();
-    assertAnalyzesTo(a, "\u0426\u044f \u043f\u02bc\u0454\u0441\u0430, \u0443 \u0441\u0432\u043e\u0301\u044e \u0447\u0435\u0440\u0433\u0443 \u0440\u0443\u0445\u0430\u0454\u0442\u044c\u0441\u044f.",
-                     new String[] { "\u043f'\u0454\u0441\u0430", "\u0447\u0435\u0440\u0433\u0430", "\u0440\u0443\u0445\u0430\u0442\u0438\u0441\u044f" });
+    assertAnalyzesTo(a, "\u043c'\u044f\u0441\u0430 \u043c'\u044f\u0301\u0441\u0430 \u043c\u02BC\u044f\u0441\u0430 \u043c\u2019\u044f\u0441\u0430 \u043c\u2018\u044f\u0441\u0430 \u043c`\u044f\u0441\u0430",
+                     new String[] { "\u043c'\u044f\u0441\u043e", "\u043c'\u044f\u0441\u043e", "\u043c'\u044f\u0441\u043e", "\u043c'\u044f\u0441\u043e", "\u043c'\u044f\u0441\u043e", "\u043c'\u044f\u0441\u043e"});
     a.close();
   }
 
   public void testCapsTokenStream() throws Exception {
     Analyzer a = new UkrainianMorfologikAnalyzer();
-    assertAnalyzesTo(a, "\u0426\u0435\u0439 \u0427\u0430\u0439\u043a\u043e\u0432\u0441\u044c\u043a\u0438\u0439 \u0456 \u0490\u0435\u0442\u0435.",
-                     new String[] { "\u0447\u0430\u0439\u043a\u043e\u0432\u0441\u044c\u043a\u0438\u0439", "\u0491\u0435\u0442\u0435" });
+    assertAnalyzesTo(a, "\u0426\u0438\u0445 \u0427\u0430\u0439\u043a\u043e\u0432\u0441\u044c\u043a\u043e\u0433\u043e \u0456 \u0490\u0435\u0442\u0435.",
+                     new String[] { "\u0427\u0430\u0439\u043a\u043e\u0432\u0441\u044c\u043a\u0435", "\u0427\u0430\u0439\u043a\u043e\u0432\u0441\u044c\u043a\u0438\u0439", "\u0490\u0435\u0442\u0435" });
     a.close();
   }
 
   public void testSampleSentence() throws Exception {
     Analyzer a = new UkrainianMorfologikAnalyzer();
     assertAnalyzesTo(a, "\u0426\u0435 \u2014 \u043f\u0440\u043e\u0435\u043a\u0442 \u0433\u0435\u043d\u0435\u0440\u0443\u0432\u0430\u043d\u043d\u044f \u0441\u043b\u043e\u0432\u043d\u0438\u043a\u0430 \u0437 \u0442\u0435\u0433\u0430\u043c\u0438 \u0447\u0430\u0441\u0442\u0438\u043d \u043c\u043e\u0432\u0438 \u0434\u043b\u044f \u0443\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u043e\u0457 \u043c\u043e\u0432\u0438.",
-                     new String[] { "\u043f\u0440\u043e\u0435\u043a\u0442", "\u0433\u0435\u043d\u0435\u0440\u0443\u0432\u0430\u043d\u043d\u044f", "\u0441\u043b\u043e\u0432\u043d\u0438\u043a", "\u0442\u0435\u0433", "\u0447\u0430\u0441\u0442\u0438\u043d\u0430", "\u043c\u043e\u0432\u0430", "\u0443\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430", "\u0443\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0438\u0439", "\u043c\u043e\u0432\u0430" });
+                     new String[] { "\u043f\u0440\u043e\u0435\u043a\u0442", "\u0433\u0435\u043d\u0435\u0440\u0443\u0432\u0430\u043d\u043d\u044f", "\u0441\u043b\u043e\u0432\u043d\u0438\u043a", "\u0442\u0435\u0433", "\u0447\u0430\u0441\u0442\u0438\u043d\u0430", "\u043c\u043e\u0432\u0430", "\u0443\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430", "\u0443\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0438\u0439", "\u0423\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430", "\u043c\u043e\u0432\u0430" });
     a.close();
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/classification/src/java/org/apache/lucene/classification/BooleanPerceptronClassifier.java
----------------------------------------------------------------------
diff --git a/lucene/classification/src/java/org/apache/lucene/classification/BooleanPerceptronClassifier.java b/lucene/classification/src/java/org/apache/lucene/classification/BooleanPerceptronClassifier.java
index 781a14f..928c036 100644
--- a/lucene/classification/src/java/org/apache/lucene/classification/BooleanPerceptronClassifier.java
+++ b/lucene/classification/src/java/org/apache/lucene/classification/BooleanPerceptronClassifier.java
@@ -58,7 +58,7 @@ import org.apache.lucene.util.fst.Util;
  */
 public class BooleanPerceptronClassifier implements Classifier<Boolean> {
 
-  private final Double threshold;
+  private final Double bias;
   private final Terms textTerms;
   private final Analyzer analyzer;
   private final String textFieldName;
@@ -72,14 +72,14 @@ public class BooleanPerceptronClassifier implements Classifier<Boolean> {
    * @param query          a {@link Query} to eventually filter the docs used for training the classifier, or {@code null}
    *                       if all the indexed docs should be used
    * @param batchSize      the size of the batch of docs to use for updating the perceptron weights
-   * @param threshold      the threshold used for class separation
+   * @param bias           the bias used for class separation
    * @param classFieldName the name of the field used as the output for the classifier
    * @param textFieldName  the name of the field used as input for the classifier
    * @throws IOException if the building of the underlying {@link FST} fails and / or {@link TermsEnum} for the text field
    *                     cannot be found
    */
   public BooleanPerceptronClassifier(IndexReader indexReader, Analyzer analyzer, Query query, Integer batchSize,
-                                     Double threshold, String classFieldName, String textFieldName) throws IOException {
+                                     Double bias, String classFieldName, String textFieldName) throws IOException {
     this.textTerms = MultiFields.getTerms(indexReader, textFieldName);
 
     if (textTerms == null) {
@@ -89,18 +89,18 @@ public class BooleanPerceptronClassifier implements Classifier<Boolean> {
     this.analyzer = analyzer;
     this.textFieldName = textFieldName;
 
-    if (threshold == null || threshold == 0d) {
-      // automatic assign a threshold
-      long sumDocFreq = indexReader.getSumDocFreq(textFieldName);
-      if (sumDocFreq != -1) {
-        this.threshold = (double) sumDocFreq / 2d;
+    if (bias == null || bias == 0d) {
+      // automatically assign the bias to be the average number of tokens per
+      // document (sum total term freq / doc count); getSumTotalTermFreq
+      // returns -1 when the field has no term statistics, so check that
+      // before dividing
+      long sumTotalTermFreq = indexReader.getSumTotalTermFreq(textFieldName);
+      if (sumTotalTermFreq != -1) {
+        this.bias = (double) sumTotalTermFreq / (double) indexReader.getDocCount(textFieldName);
       } else {
         throw new IOException(
-                "threshold cannot be assigned since term vectors for field "
+                "bias cannot be assigned since term vectors for field "
                         + textFieldName + " do not exist");
       }
     } else {
-      this.threshold = threshold;
+      this.bias = bias;
     }
 
     // TODO : remove this map as soon as we have a writable FST
@@ -173,7 +173,7 @@ public class BooleanPerceptronClassifier implements Classifier<Boolean> {
         // update weights
         Long previousValue = Util.get(fst, term);
         String termString = term.utf8ToString();
-        weights.put(termString, previousValue == null ? 0 : previousValue + modifier * termFreqLocal);
+        weights.put(termString, previousValue == null ? 0 : Math.max(0, previousValue + modifier * termFreqLocal));
       }
     }
     if (updateFST) {
@@ -216,8 +216,8 @@ public class BooleanPerceptronClassifier implements Classifier<Boolean> {
       tokenStream.end();
     }
 
-    double score = 1 - Math.exp(-1 * Math.abs(threshold - output.doubleValue()) / threshold);
-    return new ClassificationResult<>(output >= threshold, score);
+    double score = 1 - Math.exp(-1 * Math.abs(bias - output.doubleValue()) / bias);
+    return new ClassificationResult<>(output >= bias, score);
   }
 
   /**

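A worked example of the new default bias (numbers invented for illustration):
for a text field with getSumTotalTermFreq() = 2,000 over getDocCount() = 10
documents, the automatic bias becomes 2000 / 10 = 200, i.e. the average number
of tokens per document. A document whose accumulated perceptron output is 150
is then classified as false, with score
1 - exp(-|200 - 150| / 200) = 1 - exp(-0.25) ≈ 0.22.
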
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/classification/src/java/org/apache/lucene/classification/KNearestNeighborClassifier.java
----------------------------------------------------------------------
diff --git a/lucene/classification/src/java/org/apache/lucene/classification/KNearestNeighborClassifier.java b/lucene/classification/src/java/org/apache/lucene/classification/KNearestNeighborClassifier.java
index 77f0416..f0391f4 100644
--- a/lucene/classification/src/java/org/apache/lucene/classification/KNearestNeighborClassifier.java
+++ b/lucene/classification/src/java/org/apache/lucene/classification/KNearestNeighborClassifier.java
@@ -38,7 +38,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.WildcardQuery;
-import org.apache.lucene.search.similarities.ClassicSimilarity;
+import org.apache.lucene.search.similarities.BM25Similarity;
 import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.util.BytesRef;
 
@@ -86,7 +86,7 @@ public class KNearestNeighborClassifier implements Classifier<BytesRef> {
    * @param indexReader     the reader on the index to be used for classification
    * @param analyzer       an {@link Analyzer} used to analyze unseen text
    * @param similarity     the {@link Similarity} to be used by the underlying {@link IndexSearcher} or {@code null}
-   *                       (defaults to {@link org.apache.lucene.search.similarities.ClassicSimilarity})
+   *                       (defaults to {@link org.apache.lucene.search.similarities.BM25Similarity})
    * @param query          a {@link Query} to eventually filter the docs used for training the classifier, or {@code null}
    *                       if all the indexed docs should be used
    * @param k              the no. of docs to select in the MLT results to find the nearest neighbor
@@ -106,7 +106,7 @@ public class KNearestNeighborClassifier implements Classifier<BytesRef> {
     if (similarity != null) {
       this.indexSearcher.setSimilarity(similarity);
     } else {
-      this.indexSearcher.setSimilarity(new ClassicSimilarity());
+      this.indexSearcher.setSimilarity(new BM25Similarity());
     }
     if (minDocsFreq > 0) {
       mlt.setMinDocFreq(minDocsFreq);
@@ -124,7 +124,13 @@ public class KNearestNeighborClassifier implements Classifier<BytesRef> {
    */
   @Override
   public ClassificationResult<BytesRef> assignClass(String text) throws IOException {
-    TopDocs knnResults = knnSearch(text);
+    return classifyFromTopDocs(knnSearch(text));
+  }
+
+  /**
+   * Selects the single highest-scoring {@link ClassificationResult} from the
+   * given k-nearest-neighbour search results.
+   */
+  protected ClassificationResult<BytesRef> classifyFromTopDocs(TopDocs knnResults) throws IOException {
     List<ClassificationResult<BytesRef>> assignedClasses = buildListFromTopDocs(knnResults);
     ClassificationResult<BytesRef> assignedClass = null;
     double maxscore = -Double.MAX_VALUE;

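Since a null similarity now selects BM25Similarity instead of
ClassicSimilarity, existing callers get BM25-ranked neighbours without code
changes. A minimal usage sketch (field names are illustrative; note that in
the constructor the similarity precedes the analyzer, unlike the javadoc
order above):

    import java.io.IOException;
    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.classification.ClassificationResult;
    import org.apache.lucene.classification.KNearestNeighborClassifier;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.util.BytesRef;

    public class KnnClassifySketch {
      static ClassificationResult<BytesRef> classify(IndexReader reader, String text)
          throws IOException {
        KNearestNeighborClassifier classifier = new KNearestNeighborClassifier(
            reader,                // an open IndexReader over the training index
            null,                  // similarity: null now defaults to BM25Similarity
            new StandardAnalyzer(),
            null,                  // query: null trains on all indexed docs
            3,                     // k nearest neighbours
            0, 0,                  // minDocsFreq, minTermFreq: 0 keeps MLT defaults
            "category", "body");   // class field, text field
        return classifier.assignClass(text);
      }
    }
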
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/classification/src/java/org/apache/lucene/classification/document/KNearestNeighborDocumentClassifier.java
----------------------------------------------------------------------
diff --git a/lucene/classification/src/java/org/apache/lucene/classification/document/KNearestNeighborDocumentClassifier.java b/lucene/classification/src/java/org/apache/lucene/classification/document/KNearestNeighborDocumentClassifier.java
index e01090a..88d41fc 100644
--- a/lucene/classification/src/java/org/apache/lucene/classification/document/KNearestNeighborDocumentClassifier.java
+++ b/lucene/classification/src/java/org/apache/lucene/classification/document/KNearestNeighborDocumentClassifier.java
@@ -56,7 +56,7 @@ public class KNearestNeighborDocumentClassifier extends KNearestNeighborClassifi
    *
    * @param indexReader     the reader on the index to be used for classification
    * @param similarity     the {@link Similarity} to be used by the underlying {@link IndexSearcher} or {@code null}
-   *                       (defaults to {@link org.apache.lucene.search.similarities.ClassicSimilarity})
+   *                       (defaults to {@link org.apache.lucene.search.similarities.BM25Similarity})
    * @param query          a {@link org.apache.lucene.search.Query} to eventually filter the docs used for training the classifier, or {@code null}
    *                       if all the indexed docs should be used
    * @param k              the no. of docs to select in the MLT results to find the nearest neighbor
@@ -77,17 +77,7 @@ public class KNearestNeighborDocumentClassifier extends KNearestNeighborClassifi
    */
   @Override
   public ClassificationResult<BytesRef> assignClass(Document document) throws IOException {
-    TopDocs knnResults = knnSearch(document);
-    List<ClassificationResult<BytesRef>> assignedClasses = buildListFromTopDocs(knnResults);
-    ClassificationResult<BytesRef> assignedClass = null;
-    double maxscore = -Double.MAX_VALUE;
-    for (ClassificationResult<BytesRef> cl : assignedClasses) {
-      if (cl.getScore() > maxscore) {
-        assignedClass = cl;
-        maxscore = cl.getScore();
-      }
-    }
-    return assignedClass;
+    return classifyFromTopDocs(knnSearch(document));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/classification/src/test/org/apache/lucene/classification/BooleanPerceptronClassifierTest.java
----------------------------------------------------------------------
diff --git a/lucene/classification/src/test/org/apache/lucene/classification/BooleanPerceptronClassifierTest.java b/lucene/classification/src/test/org/apache/lucene/classification/BooleanPerceptronClassifierTest.java
index 6ea92c0..5ecf9c6 100644
--- a/lucene/classification/src/test/org/apache/lucene/classification/BooleanPerceptronClassifierTest.java
+++ b/lucene/classification/src/test/org/apache/lucene/classification/BooleanPerceptronClassifierTest.java
@@ -19,8 +19,12 @@ package org.apache.lucene.classification;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.classification.utils.ConfusionMatrixGenerator;
 import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.util.BytesRef;
 import org.junit.Test;
 
 /**
@@ -34,7 +38,9 @@ public class BooleanPerceptronClassifierTest extends ClassificationTestBase<Bool
     try {
       MockAnalyzer analyzer = new MockAnalyzer(random());
       leafReader = getSampleIndex(analyzer);
-      checkCorrectClassification(new BooleanPerceptronClassifier(leafReader, analyzer, null, 1, null, booleanFieldName, textFieldName), TECHNOLOGY_INPUT, false);
+      BooleanPerceptronClassifier classifier = new BooleanPerceptronClassifier(leafReader, analyzer, null, 1, null, booleanFieldName, textFieldName);
+      checkCorrectClassification(classifier, TECHNOLOGY_INPUT, false);
+      checkCorrectClassification(classifier, POLITICS_INPUT, true);
     } finally {
       if (leafReader != null) {
         leafReader.close();
@@ -60,12 +66,14 @@ public class BooleanPerceptronClassifierTest extends ClassificationTestBase<Bool
 
   @Test
   public void testBasicUsageWithQuery() throws Exception {
-    TermQuery query = new TermQuery(new Term(textFieldName, "it"));
+    TermQuery query = new TermQuery(new Term(textFieldName, "of"));
     LeafReader leafReader = null;
     try {
       MockAnalyzer analyzer = new MockAnalyzer(random());
       leafReader = getSampleIndex(analyzer);
-      checkCorrectClassification(new BooleanPerceptronClassifier(leafReader, analyzer, query, 1, null, booleanFieldName, textFieldName), TECHNOLOGY_INPUT, false);
+      BooleanPerceptronClassifier classifier = new BooleanPerceptronClassifier(leafReader, analyzer, query, 1, null, booleanFieldName, textFieldName);
+      checkCorrectClassification(classifier, TECHNOLOGY_INPUT, false);
+      checkCorrectClassification(classifier, POLITICS_INPUT, true);
     } finally {
       if (leafReader != null) {
         leafReader.close();
@@ -86,16 +94,45 @@ public class BooleanPerceptronClassifierTest extends ClassificationTestBase<Bool
 
       long evaluationStart = System.currentTimeMillis();
       ConfusionMatrixGenerator.ConfusionMatrix confusionMatrix = ConfusionMatrixGenerator.getConfusionMatrix(leafReader,
-          classifier, categoryFieldName, textFieldName, -1);
+          classifier, booleanFieldName, textFieldName, -1);
       assertNotNull(confusionMatrix);
       long evaluationEnd = System.currentTimeMillis();
       long evaluationTime = evaluationEnd - evaluationStart;
       assertTrue("evaluation took more than 1m: " + evaluationTime / 1000 + "s", evaluationTime < 60000);
       double avgClassificationTime = confusionMatrix.getAvgClassificationTime();
       assertTrue(5000 > avgClassificationTime);
-      // accuracy check disabled until LUCENE-6853 is fixed
-//      double accuracy = confusionMatrix.getAccuracy();
-//      assertTrue(accuracy > 0d);
+
+      double f1 = confusionMatrix.getF1Measure();
+      assertTrue(f1 >= 0d);
+      assertTrue(f1 <= 1d);
+
+      double accuracy = confusionMatrix.getAccuracy();
+      assertTrue(accuracy >= 0d);
+      assertTrue(accuracy <= 1d);
+
+      double recall = confusionMatrix.getRecall();
+      assertTrue(recall >= 0d);
+      assertTrue(recall <= 1d);
+
+      double precision = confusionMatrix.getPrecision();
+      assertTrue(precision >= 0d);
+      assertTrue(precision <= 1d);
+
+      Terms terms = MultiFields.getTerms(leafReader, booleanFieldName);
+      TermsEnum iterator = terms.iterator();
+      BytesRef term;
+      while ((term = iterator.next()) != null) {
+        String s = term.utf8ToString();
+        recall = confusionMatrix.getRecall(s);
+        assertTrue(recall >= 0d);
+        assertTrue(recall <= 1d);
+        precision = confusionMatrix.getPrecision(s);
+        assertTrue(precision >= 0d);
+        assertTrue(precision <= 1d);
+        double f1Measure = confusionMatrix.getF1Measure(s);
+        assertTrue(f1Measure >= 0d);
+        assertTrue(f1Measure <= 1d);
+      }
     } finally {
       leafReader.close();
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java
----------------------------------------------------------------------
diff --git a/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java b/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java
index 331a74b..6c8f7fd 100644
--- a/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java
+++ b/lucene/classification/src/test/org/apache/lucene/classification/ClassificationTestBase.java
@@ -88,8 +88,9 @@ public abstract class ClassificationTestBase<T> extends LuceneTestCase {
 
   protected ClassificationResult<T> checkCorrectClassification(Classifier<T> classifier, String inputDoc, T expectedResult) throws Exception {
     ClassificationResult<T> classificationResult = classifier.assignClass(inputDoc);
-    assertNotNull(classificationResult.getAssignedClass());
-    assertEquals("got an assigned class of " + classificationResult.getAssignedClass(), expectedResult, classificationResult.getAssignedClass());
+    T assignedClass = classificationResult.getAssignedClass();
+    assertNotNull(assignedClass);
+    assertEquals("got an assigned class of " + assignedClass, expectedResult instanceof BytesRef ? ((BytesRef) expectedResult).utf8ToString() : expectedResult, assignedClass instanceof BytesRef ? ((BytesRef) assignedClass).utf8ToString() : assignedClass);
     double score = classificationResult.getScore();
     assertTrue("score should be between 0 and 1, got:" + score, score <= 1 && score >= 0);
     return classificationResult;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/classification/src/test/org/apache/lucene/classification/SimpleNaiveBayesClassifierTest.java
----------------------------------------------------------------------
diff --git a/lucene/classification/src/test/org/apache/lucene/classification/SimpleNaiveBayesClassifierTest.java b/lucene/classification/src/test/org/apache/lucene/classification/SimpleNaiveBayesClassifierTest.java
index 2b4873d..0e05d4f 100644
--- a/lucene/classification/src/test/org/apache/lucene/classification/SimpleNaiveBayesClassifierTest.java
+++ b/lucene/classification/src/test/org/apache/lucene/classification/SimpleNaiveBayesClassifierTest.java
@@ -59,8 +59,10 @@ public class SimpleNaiveBayesClassifierTest extends ClassificationTestBase<Bytes
     try {
       MockAnalyzer analyzer = new MockAnalyzer(random());
       leafReader = getSampleIndex(analyzer);
-      TermQuery query = new TermQuery(new Term(textFieldName, "it"));
-      checkCorrectClassification(new SimpleNaiveBayesClassifier(leafReader, analyzer, query, categoryFieldName, textFieldName), TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
+      TermQuery query = new TermQuery(new Term(textFieldName, "a"));
+      SimpleNaiveBayesClassifier classifier = new SimpleNaiveBayesClassifier(leafReader, analyzer, query, categoryFieldName, textFieldName);
+      checkCorrectClassification(classifier, TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
+      checkCorrectClassification(classifier, POLITICS_INPUT, POLITICS_RESULT);
     } finally {
       if (leafReader != null) {
         leafReader.close();
@@ -112,6 +114,11 @@ public class SimpleNaiveBayesClassifierTest extends ClassificationTestBase<Bytes
       assertTrue("evaluation took more than 2m: " + evaluationTime / 1000 + "s", evaluationTime < 120000);
       double avgClassificationTime = confusionMatrix.getAvgClassificationTime();
       assertTrue("avg classification time: " + avgClassificationTime, 5000 > avgClassificationTime);
+
+      double f1 = confusionMatrix.getF1Measure();
+      assertTrue(f1 >= 0d);
+      assertTrue(f1 <= 1d);
+
       double accuracy = confusionMatrix.getAccuracy();
       assertTrue(accuracy >= 0d);
       assertTrue(accuracy <= 1d);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/classification/src/test/org/apache/lucene/classification/document/KNearestNeighborDocumentClassifierTest.java
----------------------------------------------------------------------
diff --git a/lucene/classification/src/test/org/apache/lucene/classification/document/KNearestNeighborDocumentClassifierTest.java b/lucene/classification/src/test/org/apache/lucene/classification/document/KNearestNeighborDocumentClassifierTest.java
index 8c885fb..a323724 100644
--- a/lucene/classification/src/test/org/apache/lucene/classification/document/KNearestNeighborDocumentClassifierTest.java
+++ b/lucene/classification/src/test/org/apache/lucene/classification/document/KNearestNeighborDocumentClassifierTest.java
@@ -33,8 +33,9 @@ public class KNearestNeighborDocumentClassifierTest extends DocumentClassificati
     try {
       Document videoGameDocument = getVideoGameDocument();
       Document batmanDocument = getBatmanDocument();
-      checkCorrectDocumentClassification(new KNearestNeighborDocumentClassifier(indexReader,null, null, 1, 1, 1, categoryFieldName, field2analyzer, new String[]{textFieldName, titleFieldName, authorFieldName}), videoGameDocument, VIDEOGAME_RESULT);
-      checkCorrectDocumentClassification(new KNearestNeighborDocumentClassifier(indexReader,null, null, 1, 1, 1, categoryFieldName, field2analyzer, new String[]{textFieldName, titleFieldName, authorFieldName}), batmanDocument, BATMAN_RESULT);
+      KNearestNeighborDocumentClassifier classifier = new KNearestNeighborDocumentClassifier(indexReader, null, null, 1, 4, 1, categoryFieldName, field2analyzer, new String[]{textFieldName, titleFieldName, authorFieldName});
+      checkCorrectDocumentClassification(classifier, videoGameDocument, VIDEOGAME_RESULT);
+      checkCorrectDocumentClassification(classifier, batmanDocument, BATMAN_RESULT);
      // considering only the text, we get the wrong classification because the text was ambiguous on purpose
       checkCorrectDocumentClassification(new KNearestNeighborDocumentClassifier(indexReader,null, null, 1, 1, 1, categoryFieldName, field2analyzer, new String[]{textFieldName}), videoGameDocument, BATMAN_RESULT);
       checkCorrectDocumentClassification(new KNearestNeighborDocumentClassifier(indexReader,null, null, 1, 1, 1, categoryFieldName, field2analyzer, new String[]{textFieldName}), batmanDocument, VIDEOGAME_RESULT);
@@ -51,9 +52,10 @@ public class KNearestNeighborDocumentClassifierTest extends DocumentClassificati
     try {
       Document videoGameDocument = getVideoGameDocument();
       Document batmanDocument = getBatmanDocument();
-      double score1 = checkCorrectDocumentClassification(new KNearestNeighborDocumentClassifier(indexReader,null, null, 1, 1, 1, categoryFieldName, field2analyzer, new String[]{textFieldName, titleFieldName, authorFieldName}), videoGameDocument, VIDEOGAME_RESULT);
+      KNearestNeighborDocumentClassifier classifier = new KNearestNeighborDocumentClassifier(indexReader, null, null, 1, 4, 1, categoryFieldName, field2analyzer, new String[]{textFieldName, titleFieldName, authorFieldName});
+      double score1 = checkCorrectDocumentClassification(classifier, videoGameDocument, VIDEOGAME_RESULT);
       assertEquals(1.0,score1,0);
-      double score2 = checkCorrectDocumentClassification(new KNearestNeighborDocumentClassifier(indexReader,null, null, 1, 1, 1, categoryFieldName, field2analyzer, new String[]{textFieldName, titleFieldName, authorFieldName}), batmanDocument, BATMAN_RESULT);
+      double score2 = checkCorrectDocumentClassification(classifier, batmanDocument, BATMAN_RESULT);
       assertEquals(1.0,score2,0);
      // considering only the text, we get the wrong classification because the text was ambiguous on purpose
       double score3 = checkCorrectDocumentClassification(new KNearestNeighborDocumentClassifier(indexReader,null, null, 1, 1, 1, categoryFieldName, field2analyzer, new String[]{textFieldName}), videoGameDocument, BATMAN_RESULT);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDWriter.java
----------------------------------------------------------------------
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDWriter.java
index d7674ed..86697eb 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDWriter.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDWriter.java
@@ -881,7 +881,7 @@ final class SimpleTextBKDWriter implements Closeable {
 
           /** We write/read fixed-byte-width file that {@link OfflinePointReader} can read. */
           @Override
-          protected ByteSequencesWriter getWriter(IndexOutput out) {
+          protected ByteSequencesWriter getWriter(IndexOutput out, long count) {
             return new ByteSequencesWriter(out) {
               @Override
               public void write(byte[] bytes, int off, int len) throws IOException {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardAnalyzer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardAnalyzer.java b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardAnalyzer.java
index fb57573..8afffd8 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardAnalyzer.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardAnalyzer.java
@@ -81,10 +81,11 @@ public final class StandardAnalyzer extends StopwordAnalyzerBase {
   }
 
   /**
-   * Set maximum allowed token length.  If a token is seen
-   * that exceeds this length then it is discarded.  This
-   * setting only takes effect the next time tokenStream or
-   * tokenStream is called.
+   * Set the max allowed token length.  Tokens larger than this will be chopped
+   * up at this token length and emitted as multiple tokens.  If you need to
+   * skip such large tokens, you could increase this max length, and then
+   * use {@code LengthFilter} to remove long tokens.  The default is
+   * {@link StandardAnalyzer#DEFAULT_MAX_TOKEN_LENGTH}.
    */
   public void setMaxTokenLength(int length) {
     maxTokenLength = length;
@@ -107,6 +108,8 @@ public final class StandardAnalyzer extends StopwordAnalyzerBase {
     return new TokenStreamComponents(src, tok) {
       @Override
       protected void setReader(final Reader reader) {
+        // So that if maxTokenLength was changed, the change takes
+        // effect next time tokenStream is called:
         src.setMaxTokenLength(StandardAnalyzer.this.maxTokenLength);
         super.setReader(reader);
       }

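A sketch of the recipe the reworded javadoc describes: raise the tokenizer's cap so long tokens are not chopped, then drop them entirely with LengthFilter (assumes the analysis-common module for LengthFilter; the 255-char cutoff is illustrative):

    Analyzer analyzer = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName) {
        StandardTokenizer src = new StandardTokenizer();
        // let very long tokens through the tokenizer instead of chopping them...
        src.setMaxTokenLength(StandardTokenizer.MAX_TOKEN_LENGTH_LIMIT);
        // ...then remove any token longer than 255 chars
        TokenStream tok = new LengthFilter(src, 1, 255);
        return new TokenStreamComponents(src, tok);
      }
    };
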
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizer.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizer.java b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizer.java
index 5b8fc75..ed52f03 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizer.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizer.java
@@ -105,7 +105,11 @@ public final class StandardTokenizer extends Tokenizer {
   private int maxTokenLength = StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH;
 
   /**
-   * Set the max allowed token length.  No tokens longer than this are emitted.
+   * Set the max allowed token length.  Tokens larger than this will be chopped
+   * up at this token length and emitted as multiple tokens.  If you need to
+   * skip such large tokens, you could increase this max length, and then
+   * use {@code LengthFilter} to remove long tokens.  The default is
+   * {@link StandardAnalyzer#DEFAULT_MAX_TOKEN_LENGTH}.
    * 
    * @throws IllegalArgumentException if the given length is outside of the
    *  range [1, {@value #MAX_TOKEN_LENGTH_LIMIT}].

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java b/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java
index 9da1e09..e5aae4f 100644
--- a/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java
+++ b/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java
@@ -456,7 +456,7 @@ class BufferedUpdatesStream implements Accountable {
       try {
         segStates[j].finish(pool);
       } catch (Throwable th) {
-        if (firstExc != null) {
+        if (firstExc == null) {
           firstExc = th;
         }
       }

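The corrected conditional is the usual keep-the-first-failure cleanup idiom; with the old "firstExc != null" test the branch could never fire for the first failure, so every throwable was silently dropped. The pattern in isolation (a generic sketch over a hypothetical resources list, not the BufferedUpdatesStream code):

    Throwable firstExc = null;
    for (AutoCloseable resource : resources) {
      try {
        resource.close();
      } catch (Throwable th) {
        if (firstExc == null) { // record only the FIRST throwable
          firstExc = th;
        }
      }
    }
    if (firstExc != null) {
      // surface the first failure only after all resources were attempted
      throw new RuntimeException("cleanup failed", firstExc);
    }
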
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
index 88dd6a1..3cd796b 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MultiDocValues.java
@@ -1031,7 +1031,7 @@ public class MultiDocValues {
     }
     
     @Override
-    public int ordValue() {
+    public int ordValue() throws IOException {
       return (int) mapping.getGlobalOrds(nextLeaf-1).get(currentValues.ordValue());
     }
  

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java
index ce2050f..4838799 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java
@@ -85,7 +85,7 @@ final class SegmentDocValues {
       try {
         dvp.decRef();
       } catch (Throwable th) {
-        if (t != null) {
+        if (t == null) {
           t = th;
         }
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
index ccbcdf9..930340c 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
@@ -294,7 +294,7 @@ public final class SegmentReader extends CodecReader {
     synchronized(readerClosedListeners) {
       for(ClosedListener listener : readerClosedListeners) {
         try {
-          listener.onClose(cacheHelper.getKey());
+          listener.onClose(readerCacheHelper.getKey());
         } catch (Throwable t) {
           if (th == null) {
             th = t;
@@ -307,7 +307,7 @@ public final class SegmentReader extends CodecReader {
     }
   }
 
-  private final IndexReader.CacheHelper cacheHelper = new IndexReader.CacheHelper() {
+  private final IndexReader.CacheHelper readerCacheHelper = new IndexReader.CacheHelper() {
     private final IndexReader.CacheKey cacheKey = new IndexReader.CacheKey();
 
     @Override
@@ -317,18 +317,35 @@ public final class SegmentReader extends CodecReader {
 
     @Override
     public void addClosedListener(ClosedListener listener) {
+      ensureOpen();
       readerClosedListeners.add(listener);
     }
   };
 
   @Override
   public CacheHelper getReaderCacheHelper() {
-    return cacheHelper;
+    return readerCacheHelper;
   }
 
+  /** Wrap the cache helper of the core to add ensureOpen() calls that make
+   *  sure users do not register closed listeners on closed indices. */
+  private final IndexReader.CacheHelper coreCacheHelper = new IndexReader.CacheHelper() {
+
+    @Override
+    public CacheKey getKey() {
+      return core.getCacheHelper().getKey();
+    }
+
+    @Override
+    public void addClosedListener(ClosedListener listener) {
+      ensureOpen();
+      core.getCacheHelper().addClosedListener(listener);
+    }
+  };
+
   @Override
   public CacheHelper getCoreCacheHelper() {
-    return core.getCacheHelper();
+    return coreCacheHelper;
   }
 
   @Override

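The net effect of the rename plus the new coreCacheHelper wrapper is that both cache helpers now fail fast once the reader is closed; a sketch of the caller-visible behavior (listener body illustrative):

    reader.close();
    // with this patch, either call below throws AlreadyClosedException from
    // ensureOpen() instead of registering a listener that can never fire:
    reader.getReaderCacheHelper().addClosedListener(key -> {});
    reader.getCoreCacheHelper().addClosedListener(key -> {});
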
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java
index 087e487..bd23415 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedDocValues.java
@@ -46,7 +46,7 @@ public abstract class SortedDocValues extends BinaryDocValues {
    * @return ordinal for the document: this is dense, starts at 0, then
    *         increments by 1 for the next value in sorted order.
    */
-  public abstract int ordValue();
+  public abstract int ordValue() throws IOException;
 
   /** Retrieves the value for the specified ordinal. The returned
    * {@link BytesRef} may be re-used across calls to {@link #lookupOrd(int)}

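Since ordValue() now declares IOException, consumers must propagate it along with the iterator calls that already do; a minimal iteration sketch (assumes a LeafReader in scope, an enclosing method that throws IOException, and an illustrative field name):

    SortedDocValues dv = DocValues.getSorted(leafReader, "category");
    for (int doc = dv.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = dv.nextDoc()) {
      int ord = dv.ordValue();           // may now throw IOException
      BytesRef term = dv.lookupOrd(ord); // resolve the ordinal to its value
      // ... use doc/term ...
    }
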
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java b/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java
index f0e7e98..bedf17e 100644
--- a/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/StandardDirectoryReader.java
@@ -484,7 +484,8 @@ public final class StandardDirectoryReader extends DirectoryReader {
 
     @Override
     public void addClosedListener(ClosedListener listener) {
-        readerClosedListeners.add(listener);
+      ensureOpen();
+      readerClosedListeners.add(listener);
     }
 
   };

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java b/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
index 8216201..3fe0e93 100644
--- a/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
@@ -883,14 +883,6 @@ public abstract class FieldComparator<T> {
       return DocValues.getBinary(context.reader(), field);
     }
 
-    /** Check whether the given value represents <tt>null</tt>. This can be
-     *  useful if the {@link BinaryDocValues} returned by {@link #getBinaryDocValues}
-     *  use a special value as a sentinel.
-     *  <p>NOTE: The null value can only be an EMPTY {@link BytesRef}. */
-    protected boolean isNull(int doc, BytesRef term) throws IOException {
-      return getValueForDoc(doc) == null;
-    }
-
     @Override
     public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
       docTerms = getBinaryDocValues(context, field);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
index 1ec322f..97589c4 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
@@ -320,11 +320,9 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
    *          <code>trackDocScores</code> to true as well.
    * @return a {@link TopFieldCollector} instance which will sort the results by
    *         the sort criteria.
-   * @throws IOException if there is a low-level I/O error
    */
   public static TopFieldCollector create(Sort sort, int numHits,
-      boolean fillFields, boolean trackDocScores, boolean trackMaxScore)
-      throws IOException {
+      boolean fillFields, boolean trackDocScores, boolean trackMaxScore) {
     return create(sort, numHits, null, fillFields, trackDocScores, trackMaxScore);
   }
 

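With the throws clause gone, collector construction no longer needs exception handling; a minimal usage sketch (searcher and query are assumed to exist; the sort field and numHits are illustrative):

    Sort sort = new Sort(new SortField("title", SortField.Type.STRING));
    // no try/catch needed any more: create(...) no longer declares IOException
    TopFieldCollector collector = TopFieldCollector.create(sort, 10, true, false, false);
    searcher.search(query, collector); // the search itself still throws IOException
    TopDocs hits = collector.topDocs();
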
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/lucene/core/src/java/org/apache/lucene/util/ByteBlockPool.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/util/ByteBlockPool.java b/lucene/core/src/java/org/apache/lucene/util/ByteBlockPool.java
index 1b71440..af8e195 100644
--- a/lucene/core/src/java/org/apache/lucene/util/ByteBlockPool.java
+++ b/lucene/core/src/java/org/apache/lucene/util/ByteBlockPool.java
@@ -324,28 +324,25 @@ public final class ByteBlockPool {
    * the current position.
    */
   public void append(final BytesRef bytes) {
-    int length = bytes.length;
-    if (length == 0) {
-      return;
-    }
+    int bytesLeft = bytes.length;
     int offset = bytes.offset;
-    int overflow = (length + byteUpto) - BYTE_BLOCK_SIZE;
-    do {
-      if (overflow <= 0) { 
-        System.arraycopy(bytes.bytes, offset, buffer, byteUpto, length);
-        byteUpto += length;
+    while (bytesLeft > 0) {
+      int bufferLeft = BYTE_BLOCK_SIZE - byteUpto;
+      if (bytesLeft < bufferLeft) {
+        // fits within current buffer
+        System.arraycopy(bytes.bytes, offset, buffer, byteUpto, bytesLeft);
+        byteUpto += bytesLeft;
         break;
       } else {
-        final int bytesToCopy = length-overflow;
-        if (bytesToCopy > 0) {
-          System.arraycopy(bytes.bytes, offset, buffer, byteUpto, bytesToCopy);
-          offset += bytesToCopy;
-          length -= bytesToCopy;
+        // fill up this buffer and move to next one
+        if (bufferLeft > 0) {
+          System.arraycopy(bytes.bytes, offset, buffer, byteUpto, bufferLeft);
         }
         nextBuffer();
-        overflow = overflow - BYTE_BLOCK_SIZE;
+        bytesLeft -= bufferLeft;
+        offset += bufferLeft;
       }
-    }  while(true);
+    }
   }
   
   /**
@@ -353,30 +350,18 @@ public final class ByteBlockPool {
    * length into the given byte array at offset <tt>off</tt>.
    * <p>Note: this method allows to copy across block boundaries.</p>
    */
-  public void readBytes(final long offset, final byte bytes[], final int off, final int length) {
-    if (length == 0) {
-      return;
-    }
-    int bytesOffset = off;
-    int bytesLength = length;
+  public void readBytes(final long offset, final byte bytes[], int bytesOffset, int bytesLength) {
+    int bytesLeft = bytesLength;
     int bufferIndex = (int) (offset >> BYTE_BLOCK_SHIFT);
-    byte[] buffer = buffers[bufferIndex];
     int pos = (int) (offset & BYTE_BLOCK_MASK);
-    int overflow = (pos + length) - BYTE_BLOCK_SIZE;
-    do {
-      if (overflow <= 0) {
-        System.arraycopy(buffer, pos, bytes, bytesOffset, bytesLength);
-        break;
-      } else {
-        final int bytesToCopy = length - overflow;
-        System.arraycopy(buffer, pos, bytes, bytesOffset, bytesToCopy);
-        pos = 0;
-        bytesLength -= bytesToCopy;
-        bytesOffset += bytesToCopy;
-        buffer = buffers[++bufferIndex];
-        overflow = overflow - BYTE_BLOCK_SIZE;
-      }
-    } while (true);
+    while (bytesLeft > 0) {
+      byte[] buffer = buffers[bufferIndex++];
+      int chunk = Math.min(bytesLeft, BYTE_BLOCK_SIZE - pos);
+      System.arraycopy(buffer, pos, bytes, bytesOffset, chunk);
+      bytesOffset += chunk;
+      bytesLeft -= chunk;
+      pos = 0;
+    }
   }
 
   /**