Posted to commits@lucene.apache.org by ab...@apache.org on 2017/04/20 10:20:46 UTC

[13/23] lucene-solr:feature/autoscaling: Squash-merge from master.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java b/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
index 3da3541..1d8aecb 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
@@ -33,7 +33,7 @@ import java.util.Iterator;
 import java.util.List;
 
 /**
- * Accumulates statistics separated by a slot number.
+ * Accumulates statistics separated by a slot number. 
  * There is a separate statistic per slot. The slot is usually an ordinal into a set of values, e.g. tracking a count
  * frequency <em>per term</em>.
  * Sometimes there doesn't need to be a slot distinction, in which case there is just one nominal slot.
@@ -46,8 +46,7 @@ public abstract class SlotAcc implements Closeable {
     this.fcontext = fcontext;
   }
 
-  public void setNextReader(LeafReaderContext readerContext) throws IOException {
-  }
+  public void setNextReader(LeafReaderContext readerContext) throws IOException {}
 
   public abstract void collect(int doc, int slot) throws IOException;
 
@@ -61,7 +60,7 @@ public abstract class SlotAcc implements Closeable {
     int segBase = 0;
     int segMax;
     int adjustedMax = 0;
-    for (DocIterator docsIt = docs.iterator(); docsIt.hasNext(); ) {
+    for (DocIterator docsIt = docs.iterator(); docsIt.hasNext();) {
       final int doc = docsIt.nextDoc();
       if (doc >= adjustedMax) {
         do {
@@ -78,12 +77,11 @@ public abstract class SlotAcc implements Closeable {
         setNextReader(ctx);
       }
       count++;
-      collect(doc - segBase, slot);  // per-seg collectors
+      collect(doc - segBase, slot); // per-seg collectors
     }
     return count;
   }
 
-
   public abstract int compare(int slotA, int slotB);
 
   public abstract Object getValue(int slotNum) throws IOException;
@@ -101,8 +99,7 @@ public abstract class SlotAcc implements Closeable {
   public abstract void resize(Resizer resizer);
 
   @Override
-  public void close() throws IOException {
-  }
+  public void close() throws IOException {}
 
   public static abstract class Resizer {
     public abstract int getNewSize();
@@ -181,15 +178,14 @@ abstract class FuncSlotAcc extends SlotAcc {
   }
 }
 
-
-// have a version that counts the number of times a Slot has been hit?  (for avg... what else?)
+// have a version that counts the number of times a Slot has been hit? (for avg... what else?)
 
 // TODO: make more sense to have func as the base class rather than double?
 // double-slot-func -> func-slot -> slot -> acc
 // double-slot-func -> double-slot -> slot -> acc
 
 abstract class DoubleFuncSlotAcc extends FuncSlotAcc {
-  double[] result;  // TODO: use DoubleArray
+  double[] result; // TODO: use DoubleArray
   double initialValue;
 
   public DoubleFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
@@ -210,7 +206,6 @@ abstract class DoubleFuncSlotAcc extends FuncSlotAcc {
     return Double.compare(result[slotA], result[slotB]);
   }
 
-
   @Override
   public Object getValue(int slot) {
     return result[slot];
@@ -228,7 +223,7 @@ abstract class DoubleFuncSlotAcc extends FuncSlotAcc {
 }
 
 abstract class IntSlotAcc extends SlotAcc {
-  int[] result;  // use LongArray32
+  int[] result; // use LongArray32
   int initialValue;
 
   public IntSlotAcc(FacetContext fcontext, int numSlots, int initialValue) {
@@ -261,15 +256,13 @@ abstract class IntSlotAcc extends SlotAcc {
   }
 }
 
-
-
 class SumSlotAcc extends DoubleFuncSlotAcc {
   public SumSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
     super(values, fcontext, numSlots);
   }
 
   public void collect(int doc, int slotNum) throws IOException {
-    double val = values.doubleVal(doc);  // todo: worth trying to share this value across multiple stats that need it?
+    double val = values.doubleVal(doc); // todo: worth trying to share this value across multiple stats that need it?
     result[slotNum] += val;
   }
 }
@@ -287,8 +280,6 @@ class SumsqSlotAcc extends DoubleFuncSlotAcc {
   }
 }
 
-
-
 class MinSlotAcc extends DoubleFuncSlotAcc {
   public MinSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
     super(values, fcontext, numSlots, Double.NaN);
@@ -297,10 +288,10 @@ class MinSlotAcc extends DoubleFuncSlotAcc {
   @Override
   public void collect(int doc, int slotNum) throws IOException {
     double val = values.doubleVal(doc);
-    if (val == 0 && !values.exists(doc)) return;  // depend on fact that non existing values return 0 for func query
+    if (val == 0 && !values.exists(doc)) return; // depends on the fact that non-existing values return 0 for func query
 
     double currMin = result[slotNum];
-    if (!(val >= currMin)) {  // val>=currMin will be false for staring value: val>=NaN
+    if (!(val >= currMin)) { // val>=currMin will be false for the starting value: val>=NaN
       result[slotNum] = val;
     }
   }
@@ -314,17 +305,16 @@ class MaxSlotAcc extends DoubleFuncSlotAcc {
   @Override
   public void collect(int doc, int slotNum) throws IOException {
     double val = values.doubleVal(doc);
-    if (val == 0 && !values.exists(doc)) return;  // depend on fact that non existing values return 0 for func query
+    if (val == 0 && !values.exists(doc)) return; // depends on the fact that non-existing values return 0 for func query
 
     double currMax = result[slotNum];
-    if (!(val <= currMax)) {  // reversed order to handle NaN
+    if (!(val <= currMax)) { // reversed order to handle NaN
       result[slotNum] = val;
     }
   }
 
 }
 
-
 class AvgSlotAcc extends DoubleFuncSlotAcc {
   int[] counts;
 
@@ -336,7 +326,7 @@ class AvgSlotAcc extends DoubleFuncSlotAcc {
   @Override
   public void reset() {
     super.reset();
-    for (int i=0; i<counts.length; i++) {
+    for (int i = 0; i < counts.length; i++) {
       counts[i] = 0;
     }
   }
@@ -351,11 +341,12 @@ class AvgSlotAcc extends DoubleFuncSlotAcc {
   }
 
   private double avg(double tot, int count) {
-    return count==0 ? 0 : tot/count;  // returns 0 instead of NaN.. todo - make configurable? if NaN, we need to handle comparisons though...
+    return count == 0 ? 0 : tot / count; // returns 0 instead of NaN.. todo - make configurable? if NaN, we need to
+                                         // handle comparisons though...
   }
 
   private double avg(int slot) {
-    return avg(result[slot], counts[slot]);  // calc once and cache in result?
+    return avg(result[slot], counts[slot]); // calc once and cache in result?
   }
 
   @Override
@@ -367,8 +358,8 @@ class AvgSlotAcc extends DoubleFuncSlotAcc {
   public Object getValue(int slot) {
     if (fcontext.isShard()) {
       ArrayList lst = new ArrayList(2);
-      lst.add( counts[slot] );
-      lst.add( result[slot] );
+      lst.add(counts[slot]);
+      lst.add(result[slot]);
       return lst;
     } else {
       return avg(slot);
@@ -382,32 +373,157 @@ class AvgSlotAcc extends DoubleFuncSlotAcc {
   }
 }
 
+class VarianceSlotAcc extends DoubleFuncSlotAcc {
+  int[] counts;
+  double[] sum;
+
+  public VarianceSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
+    super(values, fcontext, numSlots);
+    counts = new int[numSlots];
+    sum = new double[numSlots];
+  }
+
+  @Override
+  public void reset() {
+    super.reset();
+    Arrays.fill(counts, 0);
+    Arrays.fill(sum, 0);
+  }
+
+  @Override
+  public void resize(Resizer resizer) {
+    super.resize(resizer);
+    this.counts = resizer.resize(this.counts, 0);
+    this.sum = resizer.resize(this.sum, 0);
+  }
+
+  private double variance(double sumSq, double sum, int count) {
+    double val = count == 0 ? 0 : (sumSq / count) - Math.pow(sum / count, 2);
+    return val;
+  }
+
+  private double variance(int slot) {
+    return variance(result[slot], sum[slot], counts[slot]); // calc once and cache in result?
+  }
+
+  @Override
+  public int compare(int slotA, int slotB) {
+    return Double.compare(this.variance(slotA), this.variance(slotB));
+  }
+
+  @Override
+  public Object getValue(int slot) {
+    if (fcontext.isShard()) {
+      ArrayList lst = new ArrayList(3);
+      lst.add(counts[slot]);
+      lst.add(result[slot]);
+      lst.add(sum[slot]);
+      return lst;
+    } else {
+      return this.variance(slot);
+    }
+  }
+
+  @Override
+  public void collect(int doc, int slot) throws IOException {
+    double val = values.doubleVal(doc);
+    if (values.exists(doc)) {
+      counts[slot]++;
+      result[slot] += val * val;
+      sum[slot] += val;
+    }
+  }
+}
+
+class StddevSlotAcc extends DoubleFuncSlotAcc {
+  int[] counts;
+  double[] sum;
+
+  public StddevSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
+    super(values, fcontext, numSlots);
+    counts = new int[numSlots];
+    sum = new double[numSlots];
+  }
+
+  @Override
+  public void reset() {
+    super.reset();
+    Arrays.fill(counts, 0);
+    Arrays.fill(sum, 0);
+  }
+
+  @Override
+  public void resize(Resizer resizer) {
+    super.resize(resizer);
+    this.counts = resizer.resize(this.counts, 0);
+    this.sum = resizer.resize(this.sum, 0);
+  }
+
+  private double stdDev(double sumSq, double sum, int count) {
+    double val = count == 0 ? 0 : Math.sqrt((sumSq / count) - Math.pow(sum / count, 2)); 
+    return val;
+  }
+
+  private double stdDev(int slot) {
+    return stdDev(result[slot], sum[slot], counts[slot]); // calc once and cache in result?
+  }
+
+  @Override
+  public int compare(int slotA, int slotB) {
+    return Double.compare(this.stdDev(slotA), this.stdDev(slotB));
+  }
+
+  @Override
+  public Object getValue(int slot) {
+    if (fcontext.isShard()) {
+      ArrayList lst = new ArrayList(3);
+      lst.add(counts[slot]);
+      lst.add(result[slot]);
+      lst.add(sum[slot]);
+      return lst;
+    } else {
+      return this.stdDev(slot);
+    }
+  }
+
+  @Override
+  public void collect(int doc, int slot) throws IOException {
+    double val = values.doubleVal(doc);
+    if (values.exists(doc)) {
+      counts[slot]++;
+      result[slot] += val * val;
+      sum[slot] += val;
+    }
+  }
+}
+
 abstract class CountSlotAcc extends SlotAcc {
   public CountSlotAcc(FacetContext fcontext) {
     super(fcontext);
   }
 
   public abstract void incrementCount(int slot, int count);
+
   public abstract int getCount(int slot);
 }
 
-
-
 class CountSlotArrAcc extends CountSlotAcc {
   int[] result;
+
   public CountSlotArrAcc(FacetContext fcontext, int numSlots) {
     super(fcontext);
     result = new int[numSlots];
   }
 
   @Override
-  public void collect(int doc, int slotNum) {       // TODO: count arrays can use fewer bytes based on the number of docs in the base set (that's the upper bound for single valued) - look at ttf?
+  public void collect(int doc, int slotNum) { // TODO: count arrays can use fewer bytes based on the number of docs in
+                                              // the base set (that's the upper bound for single valued) - look at ttf?
     result[slotNum]++;
   }
 
   @Override
   public int compare(int slotA, int slotB) {
-    return Integer.compare( result[slotA], result[slotB] );
+    return Integer.compare(result[slotA], result[slotB]);
   }
 
   @Override
@@ -439,7 +555,6 @@ class CountSlotArrAcc extends CountSlotAcc {
   }
 }
 
-
 class SortSlotAcc extends SlotAcc {
   public SortSlotAcc(FacetContext fcontext) {
     super(fcontext);

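A note on the statistics above: the new VarianceSlotAcc and StddevSlotAcc keep
three running values per slot (count, sum, and sum of squares in "result") and
derive the final answer on demand via the identity Var(X) = E[X^2] - (E[X])^2.
A minimal standalone sketch of that single-pass computation, using made-up
sample values:

    // Single-pass variance/stddev via E[x^2] - (E[x])^2, mirroring what the
    // accumulators above track per slot (sample values are illustrative).
    long count = 0;
    double sum = 0, sumSq = 0;
    for (double val : new double[] {2, 4, 4, 4, 5, 5, 7, 9}) {
      count++;
      sum += val;         // tracked in sum[slot]
      sumSq += val * val; // tracked in result[slot]
    }
    double variance = count == 0 ? 0 : (sumSq / count) - Math.pow(sum / count, 2); // 4.0
    double stddev = Math.sqrt(variance); // 2.0

This is the population (biased) form, and the E[x^2] - (E[x])^2 formulation can
lose precision to cancellation when the variance is small relative to the mean.
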
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java b/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java
new file mode 100644
index 0000000..917df6e
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.search.facet;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.lucene.queries.function.ValueSource;
+
+
+public class StddevAgg extends SimpleAggValueSource {
+  public StddevAgg(ValueSource vs) {
+    super("stddev", vs);
+  }
+
+  @Override
+  public SlotAcc createSlotAcc(FacetContext fcontext, int numDocs, int numSlots) throws IOException {
+    return new StddevSlotAcc(getArg(), fcontext, numSlots);
+  }
+
+  @Override
+  public FacetMerger createFacetMerger(Object prototype) {
+    return new Merger();
+  }
+
+  private static class Merger extends FacetDoubleMerger {
+    long count;
+    double sumSq;
+    double sum;
+    
+    @Override
+    @SuppressWarnings("unchecked")
+    public void merge(Object facetResult, Context mcontext1) {
+      List<Number> numberList = (List<Number>)facetResult;
+      this.count += numberList.get(0).longValue();
+      this.sumSq += numberList.get(1).doubleValue();
+      this.sum += numberList.get(2).doubleValue();
+    }
+
+    @Override
+    public Object getMergedResult() {
+      return this.getDouble();
+    }
+    
+    @Override
+    protected double getDouble() {      
+      double val = count == 0 ? 0.0d : Math.sqrt((sumSq/count)-Math.pow(sum/count, 2));
+      return val;
+    }    
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java b/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java
new file mode 100644
index 0000000..ec6955f
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search.facet;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.lucene.queries.function.ValueSource;
+
+
+public class VarianceAgg extends SimpleAggValueSource {
+  public VarianceAgg(ValueSource vs) {
+    super("variance", vs);
+  }
+
+  @Override
+  public SlotAcc createSlotAcc(FacetContext fcontext, int numDocs, int numSlots) throws IOException {
+    return new VarianceSlotAcc(getArg(), fcontext, numSlots);
+  }
+
+  @Override
+  public FacetMerger createFacetMerger(Object prototype) {
+    return new Merger();
+  }
+
+  private static class Merger extends FacetDoubleMerger {
+    long count;
+    double sumSq;
+    double sum;
+    
+    @Override
+    @SuppressWarnings("unchecked")
+    public void merge(Object facetResult, Context mcontext1) {
+      List<Number> numberList = (List<Number>)facetResult;
+      this.count += numberList.get(0).longValue();
+      this.sumSq += numberList.get(1).doubleValue();
+      this.sum += numberList.get(2).doubleValue();
+    }
+
+    @Override
+    public Object getMergedResult() {
+      return this.getDouble();
+    }
+    
+    @Override
+    protected double getDouble() {      
+      double val = count == 0 ? 0.0d : (sumSq/count)-Math.pow(sum/count, 2);
+      return val;
+    }    
+  }
+}
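
The Merger here and the one in StddevAgg work because count, sum, and sum of
squares are all additive across shards: summing the per-shard triples yields
exactly the statistics of the combined document set. A hedged sketch of the
merge step with made-up shard responses (assumes java.util.Arrays and List):

    // Each shard's getValue() returns [count, sumSq, sum] (see the SlotAcc
    // getValue() methods above); merging is element-wise addition.
    List<List<Number>> shardResults = Arrays.asList(
        Arrays.asList(5L, 145.0, 25.0),  // shard 1: count, sumSq, sum
        Arrays.asList(3L, 87.0, 15.0));  // shard 2
    long count = 0;
    double sumSq = 0, sum = 0;
    for (List<Number> r : shardResults) { // same accumulation as Merger.merge()
      count += r.get(0).longValue();
      sumSq += r.get(1).doubleValue();
      sum += r.get(2).doubleValue();
    }
    double variance = count == 0 ? 0.0d : (sumSq / count) - Math.pow(sum / count, 2); // 4.0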

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/Command.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/Command.java b/solr/core/src/java/org/apache/solr/search/grouping/Command.java
index 55e2d96..7391df6 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/Command.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/Command.java
@@ -60,6 +60,6 @@ public interface Command<T> {
   /**
    * @return The sort inside a group
    */
-  Sort getSortWithinGroup();
+  Sort getWithinGroupSort();
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
index 2dd2291..4ec01db 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
@@ -32,8 +32,8 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TimeLimitingCollector;
 import org.apache.lucene.search.TotalHitCountCollector;
 import org.apache.lucene.search.grouping.AllGroupHeadsCollector;
-import org.apache.lucene.search.grouping.function.FunctionAllGroupHeadsCollector;
-import org.apache.lucene.search.grouping.term.TermAllGroupHeadsCollector;
+import org.apache.lucene.search.grouping.TermGroupSelector;
+import org.apache.lucene.search.grouping.ValueSourceGroupSelector;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.SchemaField;
@@ -174,9 +174,11 @@ public class CommandHandler {
     final AllGroupHeadsCollector allGroupHeadsCollector;
     if (fieldType.getNumberType() != null) {
       ValueSource vs = fieldType.getValueSource(sf, null);
-      allGroupHeadsCollector = new FunctionAllGroupHeadsCollector(vs, new HashMap(), firstCommand.getSortWithinGroup());
+      allGroupHeadsCollector = AllGroupHeadsCollector.newCollector(new ValueSourceGroupSelector(vs, new HashMap<>()),
+          firstCommand.getWithinGroupSort());
     } else {
-      allGroupHeadsCollector = TermAllGroupHeadsCollector.create(firstCommand.getKey(), firstCommand.getSortWithinGroup());
+      allGroupHeadsCollector
+          = AllGroupHeadsCollector.newCollector(new TermGroupSelector(firstCommand.getKey()), firstCommand.getWithinGroupSort());
     }
     if (collectors.isEmpty()) {
       searchWithTimeLimiter(query, filter, allGroupHeadsCollector);
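
The grouping changes in this file and the ones below all follow one pattern:
the per-key-type collector classes (Term*/Function*) are replaced by a single
generic collector built from a GroupSelector for the key type. A sketch of the
before/after shape, with an illustrative field name:

    // before: one collector class per grouping key type
    //   TermAllGroupHeadsCollector.create("category", withinGroupSort);
    // after: one factory method, parameterized by a GroupSelector
    AllGroupHeadsCollector<BytesRef> collector = AllGroupHeadsCollector.newCollector(
        new TermGroupSelector("category"), withinGroupSort);
    // numeric fields go through a ValueSource-backed selector instead:
    //   AllGroupHeadsCollector.newCollector(
    //       new ValueSourceGroupSelector(vs, new HashMap<>()), withinGroupSort);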

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
index afb8ba7..1615237 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
@@ -149,7 +149,7 @@ public class QueryCommand implements Command<QueryCommandResult> {
   }
 
   @Override
-  public Sort getSortWithinGroup() {
+  public Sort getWithinGroupSort() {
     return null;
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java
index d5f9f9d..b81dda0 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/SearchGroupsFieldCommand.java
@@ -16,24 +16,26 @@
  */
 package org.apache.solr.search.grouping.distributed.command;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.grouping.AllGroupsCollector;
 import org.apache.lucene.search.grouping.FirstPassGroupingCollector;
 import org.apache.lucene.search.grouping.SearchGroup;
-import org.apache.lucene.search.grouping.function.FunctionAllGroupsCollector;
-import org.apache.lucene.search.grouping.function.FunctionFirstPassGroupingCollector;
-import org.apache.lucene.search.grouping.term.TermAllGroupsCollector;
-import org.apache.lucene.search.grouping.term.TermFirstPassGroupingCollector;
+import org.apache.lucene.search.grouping.TermGroupSelector;
+import org.apache.lucene.search.grouping.ValueSourceGroupSelector;
 import org.apache.lucene.util.BytesRef;
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.grouping.Command;
 
-import java.io.IOException;
-import java.util.*;
-
 /**
  * Creates all the collectors needed for the first phase and how to handle the results.
  */
@@ -98,18 +100,20 @@ public class SearchGroupsFieldCommand implements Command<SearchGroupsFieldComman
     if (topNGroups > 0) {
       if (fieldType.getNumberType() != null) {
         ValueSource vs = fieldType.getValueSource(field, null);
-        firstPassGroupingCollector = new FunctionFirstPassGroupingCollector(vs, new HashMap<Object,Object>(), groupSort, topNGroups);
+        firstPassGroupingCollector
+            = new FirstPassGroupingCollector<>(new ValueSourceGroupSelector(vs, new HashMap<>()), groupSort, topNGroups);
       } else {
-        firstPassGroupingCollector = new TermFirstPassGroupingCollector(field.getName(), groupSort, topNGroups);
+        firstPassGroupingCollector
+            = new FirstPassGroupingCollector<>(new TermGroupSelector(field.getName()), groupSort, topNGroups);
       }
       collectors.add(firstPassGroupingCollector);
     }
     if (includeGroupCount) {
       if (fieldType.getNumberType() != null) {
         ValueSource vs = fieldType.getValueSource(field, null);
-        allGroupsCollector = new FunctionAllGroupsCollector(vs, new HashMap<Object,Object>());
+        allGroupsCollector = new AllGroupsCollector<>(new ValueSourceGroupSelector(vs, new HashMap<>()));
       } else {
-        allGroupsCollector = new TermAllGroupsCollector(field.getName());
+        allGroupsCollector = new AllGroupsCollector<>(new TermGroupSelector(field.getName()));
       }
       collectors.add(allGroupsCollector);
     }
@@ -138,7 +142,7 @@ public class SearchGroupsFieldCommand implements Command<SearchGroupsFieldComman
   }
 
   @Override
-  public Sort getSortWithinGroup() {
+  public Sort getWithinGroupSort() {
     return null;
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java
index 2c6c401..a496278 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/TopGroupsFieldCommand.java
@@ -16,28 +16,28 @@
  */
 package org.apache.solr.search.grouping.distributed.command;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.grouping.SecondPassGroupingCollector;
 import org.apache.lucene.search.grouping.GroupDocs;
 import org.apache.lucene.search.grouping.SearchGroup;
+import org.apache.lucene.search.grouping.TermGroupSelector;
 import org.apache.lucene.search.grouping.TopGroups;
-import org.apache.lucene.search.grouping.function.FunctionSecondPassGroupingCollector;
-import org.apache.lucene.search.grouping.term.TermSecondPassGroupingCollector;
+import org.apache.lucene.search.grouping.TopGroupsCollector;
+import org.apache.lucene.search.grouping.ValueSourceGroupSelector;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.mutable.MutableValue;
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.grouping.Command;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-
 /**
  * Defines all collectors for retrieving the second phase and how to handle the collector result.
  */
@@ -47,7 +47,7 @@ public class TopGroupsFieldCommand implements Command<TopGroups<BytesRef>> {
 
     private SchemaField field;
     private Sort groupSort;
-    private Sort sortWithinGroup;
+    private Sort withinGroupSort;
     private Collection<SearchGroup<BytesRef>> firstPhaseGroups;
     private Integer maxDocPerGroup;
     private boolean needScores = false;
@@ -63,8 +63,8 @@ public class TopGroupsFieldCommand implements Command<TopGroups<BytesRef>> {
       return this;
     }
 
-    public Builder setSortWithinGroup(Sort sortWithinGroup) {
-      this.sortWithinGroup = sortWithinGroup;
+    public Builder setSortWithinGroup(Sort withinGroupSort) {
+      this.withinGroupSort = withinGroupSort;
       return this;
     }
 
@@ -89,35 +89,35 @@ public class TopGroupsFieldCommand implements Command<TopGroups<BytesRef>> {
     }
 
     public TopGroupsFieldCommand build() {
-      if (field == null || groupSort == null ||  sortWithinGroup == null || firstPhaseGroups == null ||
+      if (field == null || groupSort == null ||  withinGroupSort == null || firstPhaseGroups == null ||
           maxDocPerGroup == null) {
         throw new IllegalStateException("All required fields must be set");
       }
 
-      return new TopGroupsFieldCommand(field, groupSort, sortWithinGroup, firstPhaseGroups, maxDocPerGroup, needScores, needMaxScore);
+      return new TopGroupsFieldCommand(field, groupSort, withinGroupSort, firstPhaseGroups, maxDocPerGroup, needScores, needMaxScore);
     }
 
   }
 
   private final SchemaField field;
   private final Sort groupSort;
-  private final Sort sortWithinGroup;
+  private final Sort withinGroupSort;
   private final Collection<SearchGroup<BytesRef>> firstPhaseGroups;
   private final int maxDocPerGroup;
   private final boolean needScores;
   private final boolean needMaxScore;
-  private SecondPassGroupingCollector secondPassCollector;
+  private TopGroupsCollector secondPassCollector;
 
   private TopGroupsFieldCommand(SchemaField field,
                                 Sort groupSort,
-                                Sort sortWithinGroup,
+                                Sort withinGroupSort,
                                 Collection<SearchGroup<BytesRef>> firstPhaseGroups,
                                 int maxDocPerGroup,
                                 boolean needScores,
                                 boolean needMaxScore) {
     this.field = field;
     this.groupSort = groupSort;
-    this.sortWithinGroup = sortWithinGroup;
+    this.withinGroupSort = withinGroupSort;
     this.firstPhaseGroups = firstPhaseGroups;
     this.maxDocPerGroup = maxDocPerGroup;
     this.needScores = needScores;
@@ -135,12 +135,12 @@ public class TopGroupsFieldCommand implements Command<TopGroups<BytesRef>> {
     if (fieldType.getNumberType() != null) {
       ValueSource vs = fieldType.getValueSource(field, null);
       Collection<SearchGroup<MutableValue>> v = GroupConverter.toMutable(field, firstPhaseGroups);
-      secondPassCollector = new FunctionSecondPassGroupingCollector(
-          v, groupSort, sortWithinGroup, maxDocPerGroup, needScores, needMaxScore, true, vs, new HashMap<Object,Object>()
+      secondPassCollector = new TopGroupsCollector<>(new ValueSourceGroupSelector(vs, new HashMap<>()),
+          v, groupSort, withinGroupSort, maxDocPerGroup, needScores, needMaxScore, true
       );
     } else {
-      secondPassCollector = new TermSecondPassGroupingCollector(
-          field.getName(), firstPhaseGroups, groupSort, sortWithinGroup, maxDocPerGroup, needScores, needMaxScore, true
+      secondPassCollector = new TopGroupsCollector<>(new TermGroupSelector(field.getName()),
+          firstPhaseGroups, groupSort, withinGroupSort, maxDocPerGroup, needScores, needMaxScore, true
       );
     }
     collectors.add(secondPassCollector);
@@ -151,7 +151,7 @@ public class TopGroupsFieldCommand implements Command<TopGroups<BytesRef>> {
   @SuppressWarnings("unchecked")
   public TopGroups<BytesRef> result() {
     if (firstPhaseGroups.isEmpty()) {
-      return new TopGroups<>(groupSort.getSort(), sortWithinGroup.getSort(), 0, 0, new GroupDocs[0], Float.NaN);
+      return new TopGroups<>(groupSort.getSort(), withinGroupSort.getSort(), 0, 0, new GroupDocs[0], Float.NaN);
     }
 
     FieldType fieldType = field.getType();
@@ -173,7 +173,7 @@ public class TopGroupsFieldCommand implements Command<TopGroups<BytesRef>> {
   }
 
   @Override
-  public Sort getSortWithinGroup() {
-    return sortWithinGroup;
+  public Sort getWithinGroupSort() {
+    return withinGroupSort;
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
index a12cad1..ab13f72 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/SearchGroupShardResponseProcessor.java
@@ -55,8 +55,8 @@ public class SearchGroupShardResponseProcessor implements ShardResponseProcessor
     SortSpec ss = rb.getSortSpec();
     Sort groupSort = rb.getGroupingSpec().getGroupSort();
     final String[] fields = rb.getGroupingSpec().getFields();
-    Sort sortWithinGroup = rb.getGroupingSpec().getSortWithinGroup();
-    assert sortWithinGroup != null;
+    Sort withinGroupSort = rb.getGroupingSpec().getSortWithinGroup();
+    assert withinGroupSort != null;
 
     final Map<String, List<Collection<SearchGroup<BytesRef>>>> commandSearchGroups = new HashMap<>(fields.length, 1.0f);
     final Map<String, Map<SearchGroup<BytesRef>, Set<String>>> tempSearchGroupToShards = new HashMap<>(fields.length, 1.0f);
@@ -111,7 +111,7 @@ public class SearchGroupShardResponseProcessor implements ShardResponseProcessor
       maxElapsedTime = (int) Math.max(maxElapsedTime, srsp.getSolrResponse().getElapsedTime());
       @SuppressWarnings("unchecked")
       NamedList<NamedList> firstPhaseResult = (NamedList<NamedList>) srsp.getSolrResponse().getResponse().get("firstPhase");
-      final Map<String, SearchGroupsFieldCommandResult> result = serializer.transformToNative(firstPhaseResult, groupSort, sortWithinGroup, srsp.getShard());
+      final Map<String, SearchGroupsFieldCommandResult> result = serializer.transformToNative(firstPhaseResult, groupSort, withinGroupSort, srsp.getShard());
       for (String field : commandSearchGroups.keySet()) {
         final SearchGroupsFieldCommandResult firstPhaseCommandResult = result.get(field);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
index 2ac83c6..231e9bd 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
@@ -58,8 +58,8 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
     Sort groupSort = rb.getGroupingSpec().getGroupSort();
     String[] fields = rb.getGroupingSpec().getFields();
     String[] queries = rb.getGroupingSpec().getQueries();
-    Sort sortWithinGroup = rb.getGroupingSpec().getSortWithinGroup();
-    assert sortWithinGroup != null;
+    Sort withinGroupSort = rb.getGroupingSpec().getSortWithinGroup();
+    assert withinGroupSort != null;
 
     // If group.format=simple group.offset doesn't make sense
     int groupOffsetDefault;
@@ -122,7 +122,7 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
       NamedList<NamedList> secondPhaseResult = (NamedList<NamedList>) srsp.getSolrResponse().getResponse().get("secondPhase");
       if(secondPhaseResult == null)
         continue;
-      Map<String, ?> result = serializer.transformToNative(secondPhaseResult, groupSort, sortWithinGroup, srsp.getShard());
+      Map<String, ?> result = serializer.transformToNative(secondPhaseResult, groupSort, withinGroupSort, srsp.getShard());
       int numFound = 0;
       float maxScore = Float.NaN;
       for (String field : commandTopGroups.keySet()) {
@@ -164,7 +164,7 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
           docsPerGroup += subTopGroups.totalGroupedHitCount;
         }
       }
-      rb.mergedTopGroups.put(groupField, TopGroups.merge(topGroups.toArray(topGroupsArr), groupSort, sortWithinGroup, groupOffsetDefault, docsPerGroup, TopGroups.ScoreMergeMode.None));
+      rb.mergedTopGroups.put(groupField, TopGroups.merge(topGroups.toArray(topGroupsArr), groupSort, withinGroupSort, groupOffsetDefault, docsPerGroup, TopGroups.ScoreMergeMode.None));
     }
 
     for (String query : commandTopDocs.keySet()) {
@@ -178,10 +178,10 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
 
       int topN = rb.getGroupingSpec().getOffset() + rb.getGroupingSpec().getLimit();
       final TopDocs mergedTopDocs;
-      if (sortWithinGroup.equals(Sort.RELEVANCE)) {
+      if (withinGroupSort.equals(Sort.RELEVANCE)) {
         mergedTopDocs = TopDocs.merge(topN, topDocs.toArray(new TopDocs[topDocs.size()]));
       } else {
-        mergedTopDocs = TopDocs.merge(sortWithinGroup, topN, topDocs.toArray(new TopFieldDocs[topDocs.size()]));
+        mergedTopDocs = TopDocs.merge(withinGroupSort, topN, topDocs.toArray(new TopFieldDocs[topDocs.size()]));
       }
       rb.mergedQueryCommandResults.put(query, new QueryCommandResult(mergedTopDocs, mergedMatches));
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java
index 2602221..77dfcef 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/SearchGroupsResultTransformer.java
@@ -77,7 +77,7 @@ public class SearchGroupsResultTransformer implements ShardResultTransformer<Lis
    * {@inheritDoc}
    */
   @Override
-  public Map<String, SearchGroupsFieldCommandResult> transformToNative(NamedList<NamedList> shardResponse, Sort groupSort, Sort sortWithinGroup, String shard) {
+  public Map<String, SearchGroupsFieldCommandResult> transformToNative(NamedList<NamedList> shardResponse, Sort groupSort, Sort withinGroupSort, String shard) {
     final Map<String, SearchGroupsFieldCommandResult> result = new HashMap<>(shardResponse.size());
     for (Map.Entry<String, NamedList> command : shardResponse) {
       List<SearchGroup<BytesRef>> searchGroups = new ArrayList<>();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java
index 04a3dfc..47e20a0 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/ShardResultTransformer.java
@@ -44,10 +44,10 @@ public interface ShardResultTransformer<T, R> {
    *
    * @param shardResponse The shard response containing data in a {@link NamedList} structure
    * @param groupSort The group sort
-   * @param sortWithinGroup The sort inside a group
+   * @param withinGroupSort The sort inside a group
    * @param shard The shard address where the response originated from
    * @return native structure of the data
    */
-  R transformToNative(NamedList<NamedList> shardResponse, Sort groupSort, Sort sortWithinGroup, String shard);
+  R transformToNative(NamedList<NamedList> shardResponse, Sort groupSort, Sort withinGroupSort, String shard);
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
index 83c81e5..41145ba 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java
@@ -92,7 +92,7 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
    * {@inheritDoc}
    */
   @Override
-  public Map<String, ?> transformToNative(NamedList<NamedList> shardResponse, Sort groupSort, Sort sortWithinGroup, String shard) {
+  public Map<String, ?> transformToNative(NamedList<NamedList> shardResponse, Sort groupSort, Sort withinGroupSort, String shard) {
     Map<String, Object> result = new HashMap<>();
 
     final IndexSchema schema = rb.req.getSearcher().getSchema();
@@ -113,10 +113,10 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
         List<NamedList<Object>> documents = (List<NamedList<Object>>) commandResult.get("documents");
         ScoreDoc[] scoreDocs = transformToNativeShardDoc(documents, groupSort, shard, schema);
         final TopDocs topDocs;
-        if (sortWithinGroup.equals(Sort.RELEVANCE)) {
+        if (withinGroupSort.equals(Sort.RELEVANCE)) {
           topDocs = new TopDocs(totalHits, scoreDocs, maxScore);
         } else {
-          topDocs = new TopFieldDocs(totalHits, scoreDocs, sortWithinGroup.getSort(), maxScore);
+          topDocs = new TopFieldDocs(totalHits, scoreDocs, withinGroupSort.getSort(), maxScore);
         }
         result.put(key, new QueryCommandResult(topDocs, matches));
         continue;
@@ -137,7 +137,7 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
 
         @SuppressWarnings("unchecked")
         List<NamedList<Object>> documents = (List<NamedList<Object>>) groupResult.get("documents");
-        ScoreDoc[] scoreDocs = transformToNativeShardDoc(documents, sortWithinGroup, shard, schema);
+        ScoreDoc[] scoreDocs = transformToNativeShardDoc(documents, withinGroupSort, shard, schema);
 
         BytesRef groupValueRef = groupValue != null ? new BytesRef(groupValue) : null;
         groupDocs.add(new GroupDocs<>(Float.NaN, maxScore, totalGroupHits, scoreDocs, groupValueRef, null));
@@ -146,7 +146,7 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
       @SuppressWarnings("unchecked")
       GroupDocs<BytesRef>[] groupDocsArr = groupDocs.toArray(new GroupDocs[groupDocs.size()]);
       TopGroups<BytesRef> topGroups = new TopGroups<>(
-           groupSort.getSort(), sortWithinGroup.getSort(), totalHitCount, totalGroupedHitCount, groupDocsArr, Float.NaN
+           groupSort.getSort(), withinGroupSort.getSort(), totalHitCount, totalGroupedHitCount, groupDocsArr, Float.NaN
       );
 
       result.put(key, topGroups);
@@ -222,8 +222,8 @@ public class TopGroupsResultTransformer implements ShardResultTransformer<List<C
         Object[] convertedSortValues  = new Object[fieldDoc.fields.length];
         for (int j = 0; j < fieldDoc.fields.length; j++) {
           Object sortValue  = fieldDoc.fields[j];
-          Sort sortWithinGroup = rb.getGroupingSpec().getSortWithinGroup();
-          SchemaField field = sortWithinGroup.getSort()[j].getField() != null ? schema.getFieldOrNull(sortWithinGroup.getSort()[j].getField()) : null;
+          Sort withinGroupSort = rb.getGroupingSpec().getSortWithinGroup();
+          SchemaField field = withinGroupSort.getSort()[j].getField() != null ? schema.getFieldOrNull(withinGroupSort.getSort()[j].getField()) : null;
           if (field != null) {
             FieldType fieldType = field.getType();
             if (sortValue != null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java
index 999cd64..edccc88 100644
--- a/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java
@@ -89,7 +89,7 @@ public class ScoreJoinQParserPlugin extends QParserPlugin {
     public Query rewrite(IndexReader reader) throws IOException {
       SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
 
-      CoreContainer container = info.getReq().getCore().getCoreDescriptor().getCoreContainer();
+      CoreContainer container = info.getReq().getCore().getCoreContainer();
 
       final SolrCore fromCore = container.getCore(fromIndex);
 
@@ -222,7 +222,7 @@ public class ScoreJoinQParserPlugin extends QParserPlugin {
         final String myCore = req.getCore().getCoreDescriptor().getName();
 
         if (fromIndex != null && (!fromIndex.equals(myCore) || byPassShortCircutCheck)) {
-          CoreContainer container = req.getCore().getCoreDescriptor().getCoreContainer();
+          CoreContainer container = req.getCore().getCoreContainer();
 
           final String coreName = getCoreName(fromIndex, container);
           final SolrCore fromCore = container.getCore(coreName);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index ff0db9b..39ccadc 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -16,7 +16,6 @@
  */
 package org.apache.solr.servlet;
 
-import javax.management.MBeanServer;
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
 import javax.servlet.ServletException;
@@ -34,7 +33,6 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.lang.invoke.MethodHandles;
-import java.lang.management.ManagementFactory;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.time.Instant;
@@ -42,12 +40,12 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Locale;
 import java.util.Properties;
+import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import com.codahale.metrics.jvm.BufferPoolMetricSet;
 import com.codahale.metrics.jvm.ClassLoadingGaugeSet;
 import com.codahale.metrics.jvm.GarbageCollectorMetricSet;
 import com.codahale.metrics.jvm.MemoryUsageGaugeSet;
@@ -66,9 +64,11 @@ import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.NodeConfig;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.core.SolrXmlConfig;
+import org.apache.solr.metrics.AltBufferPoolMetricSet;
+import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.metrics.OperatingSystemMetricSet;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.request.SolrRequestInfo;
@@ -185,16 +185,24 @@ public class SolrDispatchFilter extends BaseSolrFilter {
   }
 
   private void setupJvmMetrics()  {
-    MBeanServer platformMBeanServer = ManagementFactory.getPlatformMBeanServer();
     SolrMetricManager metricManager = cores.getMetricManager();
+    final Set<String> hiddenSysProps = cores.getConfig().getHiddenSysProps();
     try {
-      String registry = SolrMetricManager.getRegistryName(SolrInfoMBean.Group.jvm);
-      metricManager.registerAll(registry, new BufferPoolMetricSet(platformMBeanServer), true, "buffers");
+      String registry = SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm);
+      metricManager.registerAll(registry, new AltBufferPoolMetricSet(), true, "buffers");
       metricManager.registerAll(registry, new ClassLoadingGaugeSet(), true, "classes");
-      metricManager.registerAll(registry, new OperatingSystemMetricSet(platformMBeanServer), true, "os");
+      metricManager.registerAll(registry, new OperatingSystemMetricSet(), true, "os");
       metricManager.registerAll(registry, new GarbageCollectorMetricSet(), true, "gc");
       metricManager.registerAll(registry, new MemoryUsageGaugeSet(), true, "memory");
       metricManager.registerAll(registry, new ThreadStatesGaugeSet(), true, "threads"); // todo should we use CachedThreadStatesGaugeSet instead?
+      MetricsMap sysprops = new MetricsMap((detailed, map) -> {
+        System.getProperties().forEach((k, v) -> {
+          if (!hiddenSysProps.contains(k)) {
+            map.put(String.valueOf(k), v);
+          }
+        });
+      });
+      metricManager.registerGauge(null, registry, sysprops, true, "properties", "system");
     } catch (Exception e) {
       log.warn("Error registering JVM metrics", e);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java b/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
index 3394de1..75917d0 100644
--- a/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
+++ b/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
@@ -15,6 +15,8 @@
  * limitations under the License.
  */
 package org.apache.solr.spelling;
+import static org.apache.solr.common.params.CommonParams.ID;
+
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -132,6 +134,19 @@ public class SpellCheckCollator {
         params.remove(DisMaxParams.BF);
         // Collate testing does not support Grouping (see SOLR-2577)
         params.remove(GroupParams.GROUP);
+        
+        // Collate testing does not support the Collapse QParser (See SOLR-8807)
+        params.remove("expand");
+        String[] filters = params.getParams(CommonParams.FQ);
+        if (filters != null) {
+          List<String> filtersToApply = new ArrayList<>(filters.length);
+          for (String fq : filters) {
+            if (!fq.startsWith("{!collapse")) {
+              filtersToApply.add(fq);
+            }
+          }
+          params.set("fq", filtersToApply.toArray(new String[filtersToApply.size()]));
+        }      
 
         // creating a request here... make sure to close it!
         ResponseBuilder checkResponse = new ResponseBuilder(

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/spelling/SpellingQueryConverter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/spelling/SpellingQueryConverter.java b/solr/core/src/java/org/apache/solr/spelling/SpellingQueryConverter.java
index 6499c50..4cc75b5 100644
--- a/solr/core/src/java/org/apache/solr/spelling/SpellingQueryConverter.java
+++ b/solr/core/src/java/org/apache/solr/spelling/SpellingQueryConverter.java
@@ -89,7 +89,7 @@ public class SpellingQueryConverter extends QueryConverter  {
     NMTOKEN = "([" + sb.toString() + "]|" + SURROGATE_PAIR + ")+";
   }
 
-  final static String PATTERN = "(?:(?!(" + NMTOKEN + ":|[\\^.]\\d+)))[^^.\\s][\\p{L}_\\-0-9]+";
+  final static String PATTERN = "(?:(?!(" + NMTOKEN + ":|[\\^.]\\d+)))[^^.:(\\s][\\p{L}_\\-0-9]+";
   // previous version: Pattern.compile("(?:(?!(\\w+:|\\d+)))\\w+");
   protected Pattern QUERY_REGEX = Pattern.compile(PATTERN);
   

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java b/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
index d3e3497..b8b9bea 100644
--- a/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
+++ b/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
@@ -16,20 +16,23 @@
  */
 package org.apache.solr.store.blockcache;
 
-import java.net.URL;
+import java.util.HashSet;
+import java.util.Set;
 import java.util.concurrent.atomic.AtomicLong;
 
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.SolrInfoMBean;
+import com.codahale.metrics.MetricRegistry;
+import org.apache.solr.core.SolrInfoBean;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
 import org.apache.solr.search.SolrCacheBase;
 
 /**
- * A {@link SolrInfoMBean} that provides metrics on block cache operations.
+ * A {@link SolrInfoBean} that provides metrics on block cache operations.
  *
  * @lucene.experimental
  */
-public class Metrics extends SolrCacheBase implements SolrInfoMBean {
+public class Metrics extends SolrCacheBase implements SolrInfoBean, SolrMetricProducer {
 
 
   public AtomicLong blockCacheSize = new AtomicLong(0);
@@ -50,66 +53,70 @@ public class Metrics extends SolrCacheBase implements SolrInfoMBean {
   public AtomicLong shardBuffercacheAllocate = new AtomicLong(0);
   public AtomicLong shardBuffercacheLost = new AtomicLong(0);
 
+  private MetricsMap metricsMap;
+  private MetricRegistry registry;
+  private Set<String> metricNames = new HashSet<>();
 
   private long previous = System.nanoTime();
 
-
-  public NamedList<Number> getStatistics() {
-    NamedList<Number> stats = new SimpleOrderedMap<>(21); // room for one method call before growing
-
-    long now = System.nanoTime();
-    long delta = Math.max(now - previous, 1);
-    double seconds = delta / 1000000000.0;
-
-    long hits_total = blockCacheHit.get();
-    long hits_delta = hits_total - blockCacheHit_last.get();
-    blockCacheHit_last.set(hits_total);
-
-    long miss_total = blockCacheMiss.get();
-    long miss_delta = miss_total - blockCacheMiss_last.get();
-    blockCacheMiss_last.set(miss_total);
-
-    long evict_total = blockCacheEviction.get();
-    long evict_delta = evict_total - blockCacheEviction_last.get();
-    blockCacheEviction_last.set(evict_total);
-
-    long storeFail_total = blockCacheStoreFail.get();
-    long storeFail_delta = storeFail_total - blockCacheStoreFail_last.get();
-    blockCacheStoreFail_last.set(storeFail_total);
-
-    long lookups_delta = hits_delta + miss_delta;
-    long lookups_total = hits_total + miss_total;
-
-    stats.add("size", blockCacheSize.get());
-    stats.add("lookups", lookups_total);
-    stats.add("hits", hits_total);
-    stats.add("evictions", evict_total);
-    stats.add("storeFails", storeFail_total);
-    stats.add("hitratio_current", calcHitRatio(lookups_delta, hits_delta));  // hit ratio since the last call
-    stats.add("lookups_persec", getPerSecond(lookups_delta, seconds)); // lookups per second since the last call
-    stats.add("hits_persec", getPerSecond(hits_delta, seconds));       // hits per second since the last call
-    stats.add("evictions_persec", getPerSecond(evict_delta, seconds));  // evictions per second since the last call
-    stats.add("storeFails_persec", getPerSecond(storeFail_delta, seconds));  // evictions per second since the last call
-    stats.add("time_delta", seconds);  // seconds since last call
-
-    // TODO: these aren't really related to the BlockCache
-    stats.add("buffercache.allocations", getPerSecond(shardBuffercacheAllocate.getAndSet(0), seconds));
-    stats.add("buffercache.lost", getPerSecond(shardBuffercacheLost.getAndSet(0), seconds));
-
-    previous = now;
-
-    return stats;
+  @Override
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    registry = manager.registry(registryName);
+    metricsMap = new MetricsMap((detailed, map) -> {
+      long now = System.nanoTime();
+      long delta = Math.max(now - previous, 1);
+      double seconds = delta / 1000000000.0;
+
+      long hits_total = blockCacheHit.get();
+      long hits_delta = hits_total - blockCacheHit_last.get();
+      blockCacheHit_last.set(hits_total);
+
+      long miss_total = blockCacheMiss.get();
+      long miss_delta = miss_total - blockCacheMiss_last.get();
+      blockCacheMiss_last.set(miss_total);
+
+      long evict_total = blockCacheEviction.get();
+      long evict_delta = evict_total - blockCacheEviction_last.get();
+      blockCacheEviction_last.set(evict_total);
+
+      long storeFail_total = blockCacheStoreFail.get();
+      long storeFail_delta = storeFail_total - blockCacheStoreFail_last.get();
+      blockCacheStoreFail_last.set(storeFail_total);
+
+      long lookups_delta = hits_delta + miss_delta;
+      long lookups_total = hits_total + miss_total;
+
+      map.put("size", blockCacheSize.get());
+      map.put("lookups", lookups_total);
+      map.put("hits", hits_total);
+      map.put("evictions", evict_total);
+      map.put("storeFails", storeFail_total);
+      map.put("hitratio_current", calcHitRatio(lookups_delta, hits_delta));  // hit ratio since the last call
+      map.put("lookups_persec", getPerSecond(lookups_delta, seconds)); // lookups per second since the last call
+      map.put("hits_persec", getPerSecond(hits_delta, seconds));       // hits per second since the last call
+      map.put("evictions_persec", getPerSecond(evict_delta, seconds));  // evictions per second since the last call
+      map.put("storeFails_persec", getPerSecond(storeFail_delta, seconds));  // evictions per second since the last call
+      map.put("time_delta", seconds);  // seconds since last call
+
+      // TODO: these aren't really related to the BlockCache
+      map.put("buffercache.allocations", getPerSecond(shardBuffercacheAllocate.getAndSet(0), seconds));
+      map.put("buffercache.lost", getPerSecond(shardBuffercacheLost.getAndSet(0), seconds));
+
+      previous = now;
+
+    });
+    manager.registerGauge(this, registryName, metricsMap, true, getName(), getCategory().toString(), scope);
   }
 
   private float getPerSecond(long value, double seconds) {
     return (float) (value / seconds);
   }
 
-  // SolrInfoMBean methods
+  // SolrInfoBean methods
 
   @Override
   public String getName() {
-    return "HdfsBlockCache";
+    return "hdfsBlockCache";
   }
 
   @Override
@@ -118,12 +125,13 @@ public class Metrics extends SolrCacheBase implements SolrInfoMBean {
   }
 
   @Override
-  public String getSource() {
-    return null;
+  public Set<String> getMetricNames() {
+    return metricNames;
   }
 
   @Override
-  public URL[] getDocs() {
-    return null;
+  public MetricRegistry getMetricRegistry() {
+    return registry;
   }
+
 }
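The net effect of this hunk is that the eagerly-computed NamedList from getStatistics() becomes a MetricsMap gauge whose lambda runs on every read, so deltas, rates and ratios are derived lazily at request time. A minimal sketch of the same pattern, assuming only the MetricsMap, SolrMetricManager and SolrMetricProducer signatures visible in this diff (the class and metric names are illustrative, not part of the commit):

    import java.util.concurrent.atomic.AtomicLong;

    import org.apache.solr.metrics.MetricsMap;
    import org.apache.solr.metrics.SolrMetricManager;
    import org.apache.solr.metrics.SolrMetricProducer;

    public class ExampleCacheMetrics implements SolrMetricProducer {
      private final AtomicLong hits = new AtomicLong();
      private final AtomicLong misses = new AtomicLong();

      @Override
      public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
        // The lambda is invoked each time the gauge is read.
        MetricsMap metricsMap = new MetricsMap((detailed, map) -> {
          long h = hits.get();
          long lookups = h + misses.get();
          map.put("hits", h);
          map.put("lookups", lookups);
          map.put("hitratio", lookups == 0 ? 0.0f : (float) h / lookups);
        });
        // Metrics above passes `this` as the first argument because it is also
        // a SolrInfoBean; a bare producer may pass null, as PeerSync does
        // later in this commit.
        manager.registerGauge(null, registryName, metricsMap, true, "exampleCache", "CACHE", scope);
      }
    }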

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
index ba7c7fd..64e6356 100644
--- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
+++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
@@ -18,8 +18,8 @@ package org.apache.solr.store.hdfs;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.net.URL;
 import java.util.Arrays;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -27,16 +27,18 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
+import com.codahale.metrics.MetricRegistry;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class HdfsLocalityReporter implements SolrInfoMBean {
+public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
   public static final String LOCALITY_BYTES_TOTAL = "locality.bytes.total";
   public static final String LOCALITY_BYTES_LOCAL = "locality.bytes.local";
   public static final String LOCALITY_BYTES_RATIO = "locality.bytes.ratio";
@@ -49,6 +51,9 @@ public class HdfsLocalityReporter implements SolrInfoMBean {
   private String hostname;
   private final ConcurrentMap<HdfsDirectory,ConcurrentMap<FileStatus,BlockLocation[]>> cache;
 
+  private final Set<String> metricNames = new HashSet<>();
+  private MetricRegistry registry;
+
   public HdfsLocalityReporter() {
     cache = new ConcurrentHashMap<>();
   }
@@ -67,11 +72,6 @@ public class HdfsLocalityReporter implements SolrInfoMBean {
   }
 
   @Override
-  public String getVersion() {
-    return getClass().getPackage().getSpecificationVersion();
-  }
-
-  @Override
   public String getDescription() {
     return "Provides metrics for HDFS data locality.";
   }
@@ -82,89 +82,71 @@ public class HdfsLocalityReporter implements SolrInfoMBean {
   }
 
   @Override
-  public String getSource() {
-    return null;
+  public Set<String> getMetricNames() {
+    return metricNames;
   }
 
   @Override
-  public URL[] getDocs() {
-    return null;
+  public MetricRegistry getMetricRegistry() {
+    return registry;
   }
 
   /**
    * Provide statistics on HDFS block locality, both in terms of bytes and block counts.
    */
   @Override
-  public NamedList getStatistics() {
-    long totalBytes = 0;
-    long localBytes = 0;
-    int totalCount = 0;
-    int localCount = 0;
-
-    for (Iterator<HdfsDirectory> iterator = cache.keySet().iterator(); iterator.hasNext();) {
-      HdfsDirectory hdfsDirectory = iterator.next();
-
-      if (hdfsDirectory.isClosed()) {
-        iterator.remove();
-      } else {
-        try {
-          refreshDirectory(hdfsDirectory);
-          Map<FileStatus,BlockLocation[]> blockMap = cache.get(hdfsDirectory);
-
-          // For every block in every file in this directory, count it
-          for (BlockLocation[] locations : blockMap.values()) {
-            for (BlockLocation bl : locations) {
-              totalBytes += bl.getLength();
-              totalCount++;
-
-              if (Arrays.asList(bl.getHosts()).contains(hostname)) {
-                localBytes += bl.getLength();
-                localCount++;
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String scope) {
+    registry = manager.registry(registryName);
+    MetricsMap metricsMap = new MetricsMap((detailed, map) -> {
+      long totalBytes = 0;
+      long localBytes = 0;
+      int totalCount = 0;
+      int localCount = 0;
+
+      for (Iterator<HdfsDirectory> iterator = cache.keySet().iterator(); iterator.hasNext();) {
+        HdfsDirectory hdfsDirectory = iterator.next();
+
+        if (hdfsDirectory.isClosed()) {
+          iterator.remove();
+        } else {
+          try {
+            refreshDirectory(hdfsDirectory);
+            Map<FileStatus,BlockLocation[]> blockMap = cache.get(hdfsDirectory);
+
+            // For every block in every file in this directory, count it
+            for (BlockLocation[] locations : blockMap.values()) {
+              for (BlockLocation bl : locations) {
+                totalBytes += bl.getLength();
+                totalCount++;
+
+                if (Arrays.asList(bl.getHosts()).contains(hostname)) {
+                  localBytes += bl.getLength();
+                  localCount++;
+                }
               }
             }
+          } catch (IOException e) {
+            logger.warn("Could not retrieve locality information for {} due to exception: {}",
+                hdfsDirectory.getHdfsDirPath(), e);
           }
-        } catch (IOException e) {
-          logger.warn("Could not retrieve locality information for {} due to exception: {}",
-              hdfsDirectory.getHdfsDirPath(), e);
         }
       }
-    }
-
-    return createStatistics(totalBytes, localBytes, totalCount, localCount);
-  }
-
-  /**
-   * Generate a statistics object based on the given measurements for all files monitored by this reporter.
-   * 
-   * @param totalBytes
-   *          The total bytes used
-   * @param localBytes
-   *          The amount of bytes found on local nodes
-   * @param totalCount
-   *          The total block count
-   * @param localCount
-   *          The amount of blocks found on local nodes
-   * @return HDFS block locality statistics
-   */
-  private NamedList<Number> createStatistics(long totalBytes, long localBytes, int totalCount, int localCount) {
-    NamedList<Number> statistics = new SimpleOrderedMap<Number>();
-
-    statistics.add(LOCALITY_BYTES_TOTAL, totalBytes);
-    statistics.add(LOCALITY_BYTES_LOCAL, localBytes);
-    if (localBytes == 0) {
-      statistics.add(LOCALITY_BYTES_RATIO, 0);
-    } else {
-      statistics.add(LOCALITY_BYTES_RATIO, localBytes / (double) totalBytes);
-    }
-    statistics.add(LOCALITY_BLOCKS_TOTAL, totalCount);
-    statistics.add(LOCALITY_BLOCKS_LOCAL, localCount);
-    if (localCount == 0) {
-      statistics.add(LOCALITY_BLOCKS_RATIO, 0);
-    } else {
-      statistics.add(LOCALITY_BLOCKS_RATIO, localCount / (double) totalCount);
-    }
-
-    return statistics;
+      map.put(LOCALITY_BYTES_TOTAL, totalBytes);
+      map.put(LOCALITY_BYTES_LOCAL, localBytes);
+      if (localBytes == 0) {
+        map.put(LOCALITY_BYTES_RATIO, 0);
+      } else {
+        map.put(LOCALITY_BYTES_RATIO, localBytes / (double) totalBytes);
+      }
+      map.put(LOCALITY_BLOCKS_TOTAL, totalCount);
+      map.put(LOCALITY_BLOCKS_LOCAL, localCount);
+      if (localCount == 0) {
+        map.put(LOCALITY_BLOCKS_RATIO, 0);
+      } else {
+        map.put(LOCALITY_BLOCKS_RATIO, localCount / (double) totalCount);
+      }
+    });
+    manager.registerGauge(this, registryName, metricsMap, true, "hdfsLocality", getCategory().toString(), scope);
   }
 
   /**
@@ -209,4 +191,5 @@ public class HdfsLocalityReporter implements SolrInfoMBean {
       }
     }
   }
+
 }
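As in Metrics above, the getStatistics()/createStatistics() pair collapses into a single MetricsMap lambda; the locality tally itself is unchanged. Reduced to a single file, the computation amounts to the sketch below, where fs, status and hostname stand in for the reporter's cached state (only the Hadoop BlockLocation API shown in this diff is assumed):

    import java.io.IOException;
    import java.util.Arrays;

    import org.apache.hadoop.fs.BlockLocation;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;

    class LocalitySketch {
      static double localBytesRatio(FileSystem fs, FileStatus status, String hostname)
          throws IOException {
        long totalBytes = 0;
        long localBytes = 0;
        for (BlockLocation bl : fs.getFileBlockLocations(status, 0, status.getLen())) {
          totalBytes += bl.getLength();
          // A block counts as local when this node appears among its replica hosts.
          if (Arrays.asList(bl.getHosts()).contains(hostname)) {
            localBytes += bl.getLength();
          }
        }
        // localBytes == 0 also covers totalBytes == 0, so the division is safe;
        // this mirrors the LOCALITY_BYTES_RATIO branch above.
        return localBytes == 0 ? 0.0 : localBytes / (double) totalBytes;
      }
    }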

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java b/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java
index 0ba0b81..7006b4a 100644
--- a/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java
+++ b/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java
@@ -19,11 +19,11 @@ package org.apache.solr.uninverting;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Map;
+import java.util.function.Function;
 
 import org.apache.lucene.document.BinaryDocValuesField; // javadocs
 import org.apache.lucene.document.NumericDocValuesField; // javadocs
 import org.apache.lucene.document.SortedDocValuesField; // javadocs
-import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField; // javadocs
 import org.apache.lucene.document.StringField; // javadocs
 import org.apache.lucene.index.BinaryDocValues;
@@ -170,62 +170,44 @@ public class UninvertingReader extends FilterLeafReader {
      * Fields with this type act as if they were indexed with
      * {@link SortedSetDocValuesField}.
      */
-    SORTED_SET_DOUBLE,
-    /** 
-     * Multi-valued Integer, (e.g. indexed with {@link org.apache.lucene.document.IntPoint})
-     * <p>
-     * Fields with this type act as if they were indexed with
-     * {@link SortedNumericDocValuesField}.
-     */
-    SORTED_INTEGER,
-    /** 
-     * Multi-valued Float, (e.g. indexed with {@link org.apache.lucene.document.FloatPoint})
-     * <p>
-     * Fields with this type act as if they were indexed with
-     * {@link SortedNumericDocValuesField}.
-     */
-    SORTED_FLOAT,
-    /** 
-     * Multi-valued Long, (e.g. indexed with {@link org.apache.lucene.document.LongPoint})
-     * <p>
-     * Fields with this type act as if they were indexed with
-     * {@link SortedNumericDocValuesField}.
-     */
-    SORTED_LONG,
-    /** 
-     * Multi-valued Double, (e.g. indexed with {@link org.apache.lucene.document.DoublePoint})
-     * <p>
-     * Fields with this type act as if they were indexed with
-     * {@link SortedNumericDocValuesField}.
-     */
-    SORTED_DOUBLE
+    SORTED_SET_DOUBLE
+
   }
   
   /**
+   * 
    * Wraps a provided DirectoryReader. Note that for convenience, the returned reader
    * can be used normally (e.g. passed to {@link DirectoryReader#openIfChanged(DirectoryReader)})
    * and so on. 
+   * 
+   * @param in input directory reader
+   * @param perSegmentMapper function to map a segment reader to a mapping of fields to their uninversion type
+   * @return a wrapped directory reader
    */
+  public static DirectoryReader wrap(DirectoryReader in, final Function<LeafReader, Map<String,Type>> perSegmentMapper) throws IOException {
+    return new UninvertingDirectoryReader(in, perSegmentMapper);
+  }
+  
   public static DirectoryReader wrap(DirectoryReader in, final Map<String,Type> mapping) throws IOException {
-    return new UninvertingDirectoryReader(in, mapping);
+    return UninvertingReader.wrap(in, (r) -> mapping);
   }
   
   static class UninvertingDirectoryReader extends FilterDirectoryReader {
-    final Map<String,Type> mapping;
+    final Function<LeafReader, Map<String,Type>> mapper;
     
-    public UninvertingDirectoryReader(DirectoryReader in, final Map<String,Type> mapping) throws IOException {
+    public UninvertingDirectoryReader(DirectoryReader in, final Function<LeafReader, Map<String,Type>> mapper) throws IOException {
       super(in, new FilterDirectoryReader.SubReaderWrapper() {
         @Override
         public LeafReader wrap(LeafReader reader) {
-          return new UninvertingReader(reader, mapping);
+          return new UninvertingReader(reader, mapper.apply(reader));
         }
       });
-      this.mapping = mapping;
+      this.mapper = mapper;
     }
 
     @Override
     protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
-      return new UninvertingDirectoryReader(in, mapping);
+      return new UninvertingDirectoryReader(in, mapper);
     }
 
     // NOTE: delegating the cache helpers is wrong since this wrapper alters the
@@ -244,7 +226,7 @@ public class UninvertingReader extends FilterLeafReader {
   /** 
    * Create a new UninvertingReader with the specified mapping 
    * <p>
-   * Expert: This should almost never be used. Use {@link #wrap(DirectoryReader, Map)}
+   * Expert: This should almost never be used. Use {@link #wrap(DirectoryReader, Function)}
    * instead.
    *  
    * @lucene.internal
@@ -293,12 +275,6 @@ public class UninvertingReader extends FilterLeafReader {
             case SORTED_SET_DOUBLE:
               type = DocValuesType.SORTED_SET;
               break;
-            case SORTED_INTEGER:
-            case SORTED_FLOAT:
-            case SORTED_LONG:
-            case SORTED_DOUBLE:
-              type = DocValuesType.SORTED_NUMERIC;
-              break;
             default:
               throw new AssertionError();
           }
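The new wrap(DirectoryReader, Function) overload lets callers pick the uninversion mapping per segment, and the old Map-based wrap() now just delegates with a constant function. A hypothetical caller, which uninverts a field only on segments that actually contain it ("popularity" is an illustrative field name, not from this commit):

    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.lucene.index.DirectoryReader;
    import org.apache.solr.uninverting.UninvertingReader;
    import org.apache.solr.uninverting.UninvertingReader.Type;

    class PerSegmentWrapExample {
      static DirectoryReader wrapPerSegment(DirectoryReader in) throws IOException {
        return UninvertingReader.wrap(in, leaf -> {
          Map<String,Type> mapping = new HashMap<>();
          // Skip segments where the field is absent (or vary the Type per segment).
          if (leaf.getFieldInfos().fieldInfo("popularity") != null) {
            mapping.put("popularity", Type.SORTED);
          }
          return mapping;
        });
      }
    }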

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java b/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
index d0daebb..bc2afa8 100644
--- a/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
+++ b/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
@@ -281,7 +281,7 @@ public final class DefaultSolrCoreState extends SolrCoreState implements Recover
     Thread thread = new Thread() {
       @Override
       public void run() {
-        MDCLoggingContext.setCoreDescriptor(cd);
+        MDCLoggingContext.setCoreDescriptor(cc, cd);
         try {
           if (SKIP_AUTO_RECOVERY) {
             log.warn("Skipping recovery according to sys prop solrcloud.skip.autorecovery");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
index fdc9d22..dd179f2 100644
--- a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
+++ b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
@@ -18,7 +18,6 @@ package org.apache.solr.update;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
@@ -48,8 +47,6 @@ import org.apache.solr.cloud.ZkController;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.core.SolrConfig.UpdateHandlerInfo;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.metrics.SolrMetricManager;
@@ -124,7 +121,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
     commitWithinSoftCommit = updateHandlerInfo.commitWithinSoftCommit;
     indexWriterCloseWaitsForMerges = updateHandlerInfo.indexWriterCloseWaitsForMerges;
 
-    ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
+    ZkController zkController = core.getCoreContainer().getZkController();
     if (zkController != null) {
       DocCollection dc = zkController.getClusterState().getCollection(core.getCoreDescriptor().getCollectionName());
       if (dc.getRealtimeReplicas() == 1) {
@@ -162,24 +159,40 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
 
   @Override
   public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
-    commitCommands = manager.meter(registry, "commits", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> commitTracker.getCommitCount(), true, "autoCommits", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> softCommitTracker.getCommitCount(), true, "softAutoCommits", getCategory().toString(), scope);
-    optimizeCommands = manager.meter(registry, "optimizes", getCategory().toString(), scope);
-    rollbackCommands = manager.meter(registry, "rollbacks", getCategory().toString(), scope);
-    splitCommands = manager.meter(registry, "splits", getCategory().toString(), scope);
-    mergeIndexesCommands = manager.meter(registry, "merges", getCategory().toString(), scope);
-    expungeDeleteCommands = manager.meter(registry, "expungeDeletes", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> numDocsPending.longValue(), true, "docsPending", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> addCommands.longValue(), true, "adds", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> deleteByIdCommands.longValue(), true, "deletesById", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> deleteByQueryCommands.longValue(), true, "deletesByQuery", getCategory().toString(), scope);
-    manager.registerGauge(registry, () -> numErrors.longValue(), true, "errors", getCategory().toString(), scope);
-
-    addCommandsCumulative = manager.meter(registry, "cumulativeAdds", getCategory().toString(), scope);
-    deleteByIdCommandsCumulative = manager.meter(registry, "cumulativeDeletesById", getCategory().toString(), scope);
-    deleteByQueryCommandsCumulative = manager.meter(registry, "cumulativeDeletesByQuery", getCategory().toString(), scope);
-    numErrorsCumulative = manager.meter(registry, "cumulativeErrors", getCategory().toString(), scope);
+    commitCommands = manager.meter(this, registry, "commits", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> commitTracker.getCommitCount(), true, "autoCommits", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> softCommitTracker.getCommitCount(), true, "softAutoCommits", getCategory().toString(), scope);
+    if (commitTracker.getDocsUpperBound() > 0) {
+      manager.registerGauge(this, registry, () -> commitTracker.getDocsUpperBound(), true, "autoCommitMaxDocs",
+          getCategory().toString(), scope);
+    }
+    if (commitTracker.getTimeUpperBound() > 0) {
+      manager.registerGauge(this, registry, () -> "" + commitTracker.getTimeUpperBound() + "ms", true, "autoCommitMaxTime",
+          getCategory().toString(), scope);
+    }
+    if (softCommitTracker.getDocsUpperBound() > 0) {
+      manager.registerGauge(this, registry, () -> softCommitTracker.getDocsUpperBound(), true, "softAutoCommitMaxDocs",
+          getCategory().toString(), scope);
+    }
+    if (softCommitTracker.getTimeUpperBound() > 0) {
+      manager.registerGauge(this, registry, () -> "" + softCommitTracker.getTimeUpperBound() + "ms", true, "softAutoCommitMaxTime",
+          getCategory().toString(), scope);
+    }
+    optimizeCommands = manager.meter(this, registry, "optimizes", getCategory().toString(), scope);
+    rollbackCommands = manager.meter(this, registry, "rollbacks", getCategory().toString(), scope);
+    splitCommands = manager.meter(this, registry, "splits", getCategory().toString(), scope);
+    mergeIndexesCommands = manager.meter(this, registry, "merges", getCategory().toString(), scope);
+    expungeDeleteCommands = manager.meter(this, registry, "expungeDeletes", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> numDocsPending.longValue(), true, "docsPending", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> addCommands.longValue(), true, "adds", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> deleteByIdCommands.longValue(), true, "deletesById", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> deleteByQueryCommands.longValue(), true, "deletesByQuery", getCategory().toString(), scope);
+    manager.registerGauge(this, registry, () -> numErrors.longValue(), true, "errors", getCategory().toString(), scope);
+
+    addCommandsCumulative = manager.meter(this, registry, "cumulativeAdds", getCategory().toString(), scope);
+    deleteByIdCommandsCumulative = manager.meter(this, registry, "cumulativeDeletesById", getCategory().toString(), scope);
+    deleteByQueryCommandsCumulative = manager.meter(this, registry, "cumulativeDeletesByQuery", getCategory().toString(), scope);
+    numErrorsCumulative = manager.meter(this, registry, "cumulativeErrors", getCategory().toString(), scope);
   }
 
   private void deleteAll() throws IOException {
@@ -755,7 +768,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
    */
   @Override
   public void rollback(RollbackUpdateCommand cmd) throws IOException {
-    if (core.getCoreDescriptor().getCoreContainer().isZooKeeperAware()) {
+    if (core.getCoreContainer().isZooKeeperAware()) {
       throw new UnsupportedOperationException("Rollback is currently not supported in SolrCloud mode. (SOLR-4895)");
     }
 
@@ -811,7 +824,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
   @Override
   public void closeWriter(IndexWriter writer) throws IOException {
 
-    assert TestInjection.injectNonGracefullClose(core.getCoreDescriptor().getCoreContainer());
+    assert TestInjection.injectNonGracefullClose(core.getCoreContainer());
     
     boolean clearRequestInfo = false;
     solrCoreState.getCommitLock().lock();
@@ -951,7 +964,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
 
 
   /////////////////////////////////////////////////////////////////////
-  // SolrInfoMBean stuff: Statistics and Module Info
+  // SolrInfoBean stuff: Statistics and Module Info
   /////////////////////////////////////////////////////////////////////
 
   @Override
@@ -960,70 +973,11 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
   }
 
   @Override
-  public String getVersion() {
-    return SolrCore.version;
-  }
-
-  @Override
   public String getDescription() {
     return "Update handler that efficiently directly updates the on-disk main lucene index";
   }
 
   @Override
-  public String getSource() {
-    return null;
-  }
-
-  @Override
-  public URL[] getDocs() {
-    return null;
-  }
-
-  @Override
-  public NamedList getStatistics() {
-    NamedList lst = new SimpleOrderedMap();
-    lst.add("commits", commitCommands.getCount());
-    if (commitTracker.getDocsUpperBound() > 0) {
-      lst.add("autocommit maxDocs", commitTracker.getDocsUpperBound());
-    }
-    if (commitTracker.getTimeUpperBound() > 0) {
-      lst.add("autocommit maxTime", "" + commitTracker.getTimeUpperBound() + "ms");
-    }
-    lst.add("autocommits", commitTracker.getCommitCount());
-    if (softCommitTracker.getDocsUpperBound() > 0) {
-      lst.add("soft autocommit maxDocs", softCommitTracker.getDocsUpperBound());
-    }
-    if (softCommitTracker.getTimeUpperBound() > 0) {
-      lst.add("soft autocommit maxTime", "" + softCommitTracker.getTimeUpperBound() + "ms");
-    }
-    lst.add("soft autocommits", softCommitTracker.getCommitCount());
-    lst.add("optimizes", optimizeCommands.getCount());
-    lst.add("rollbacks", rollbackCommands.getCount());
-    lst.add("expungeDeletes", expungeDeleteCommands.getCount());
-    lst.add("docsPending", numDocsPending.longValue());
-    // pset.size() not synchronized, but it should be fine to access.
-    // lst.add("deletesPending", pset.size());
-    lst.add("adds", addCommands.longValue());
-    lst.add("deletesById", deleteByIdCommands.longValue());
-    lst.add("deletesByQuery", deleteByQueryCommands.longValue());
-    lst.add("errors", numErrors.longValue());
-    lst.add("cumulative_adds", addCommandsCumulative.getCount());
-    lst.add("cumulative_deletesById", deleteByIdCommandsCumulative.getCount());
-    lst.add("cumulative_deletesByQuery", deleteByQueryCommandsCumulative.getCount());
-    lst.add("cumulative_errors", numErrorsCumulative.getCount());
-    if (this.ulog != null) {
-      lst.add("transaction_logs_total_size", ulog.getTotalLogsSize());
-      lst.add("transaction_logs_total_number", ulog.getTotalLogsNumber());
-    }
-    return lst;
-  }
-
-  @Override
-  public String toString() {
-    return "DirectUpdateHandler2" + getStatistics();
-  }
-  
-  @Override
   public SolrCoreState getSolrCoreState() {
     return solrCoreState;
   }
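For anyone tracking the stats rename: the keys that the removed getStatistics() used to emit map onto the metric names now registered in initializeMetrics() as follows (both sets appear verbatim in this hunk):

    autocommits               -> autoCommits
    autocommit maxDocs        -> autoCommitMaxDocs
    autocommit maxTime        -> autoCommitMaxTime
    soft autocommits          -> softAutoCommits
    soft autocommit maxDocs   -> softAutoCommitMaxDocs
    soft autocommit maxTime   -> softAutoCommitMaxTime
    cumulative_adds           -> cumulativeAdds
    cumulative_deletesById    -> cumulativeDeletesById
    cumulative_deletesByQuery -> cumulativeDeletesByQuery
    cumulative_errors         -> cumulativeErrors

The remaining keys keep their names, and the transaction_logs_* entries are not re-registered in this hunk.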

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java b/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
index 71e20d9..7bb74d0 100644
--- a/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
+++ b/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
@@ -37,7 +37,7 @@ import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.util.HdfsUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -262,7 +262,7 @@ public class HdfsUpdateLog extends UpdateLog {
     }
 
     // initialize metrics
-    core.getCoreMetricManager().registerMetricProducer(SolrInfoMBean.Category.TLOG.toString(), this);
+    core.getCoreMetricManager().registerMetricProducer(SolrInfoBean.Category.TLOG.toString(), this);
   }
   
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8df9f8c/solr/core/src/java/org/apache/solr/update/PeerSync.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/PeerSync.java b/solr/core/src/java/org/apache/solr/update/PeerSync.java
index 9470cca..dfadb0c 100644
--- a/solr/core/src/java/org/apache/solr/update/PeerSync.java
+++ b/solr/core/src/java/org/apache/solr/update/PeerSync.java
@@ -43,7 +43,7 @@ import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.handler.component.HttpShardHandlerFactory;
 import org.apache.solr.handler.component.ShardHandler;
 import org.apache.solr.handler.component.ShardHandlerFactory;
@@ -151,25 +151,25 @@ public class PeerSync implements SolrMetricProducer {
     this.cantReachIsSuccess = cantReachIsSuccess;
     this.getNoVersionsIsSuccess = getNoVersionsIsSuccess;
     this.doFingerprint = doFingerprint && !("true".equals(System.getProperty("solr.disableFingerprint")));
-    this.client = core.getCoreDescriptor().getCoreContainer().getUpdateShardHandler().getHttpClient();
+    this.client = core.getCoreContainer().getUpdateShardHandler().getHttpClient();
     this.onlyIfActive = onlyIfActive;
     
     uhandler = core.getUpdateHandler();
     ulog = uhandler.getUpdateLog();
     // TODO: close
-    shardHandlerFactory = (HttpShardHandlerFactory) core.getCoreDescriptor().getCoreContainer().getShardHandlerFactory();
+    shardHandlerFactory = (HttpShardHandlerFactory) core.getCoreContainer().getShardHandlerFactory();
     shardHandler = shardHandlerFactory.getShardHandler(client);
 
-    core.getCoreMetricManager().registerMetricProducer(SolrInfoMBean.Category.REPLICATION.toString(), this);
+    core.getCoreMetricManager().registerMetricProducer(SolrInfoBean.Category.REPLICATION.toString(), this);
   }
 
   public static final String METRIC_SCOPE = "peerSync";
 
   @Override
   public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
-    syncTime = manager.timer(registry, "time", scope, METRIC_SCOPE);
-    syncErrors = manager.counter(registry, "errors", scope, METRIC_SCOPE);
-    syncSkipped = manager.counter(registry, "skipped", scope, METRIC_SCOPE);
+    syncTime = manager.timer(null, registry, "time", scope, METRIC_SCOPE);
+    syncErrors = manager.counter(null, registry, "errors", scope, METRIC_SCOPE);
+    syncSkipped = manager.counter(null, registry, "skipped", scope, METRIC_SCOPE);
   }
 
   /** optional list of updates we had before possibly receiving new updates */
@@ -184,7 +184,7 @@ public class PeerSync implements SolrMetricProducer {
 
   // start of peersync related debug messages.  includes the core name for correlation.
   private String msg() {
-    ZkController zkController = uhandler.core.getCoreDescriptor().getCoreContainer().getZkController();
+    ZkController zkController = uhandler.core.getCoreContainer().getZkController();
 
     String myURL = "";
 
@@ -882,7 +882,7 @@ public class PeerSync implements SolrMetricProducer {
 
   /** Requests and applies recent updates from peers */
   public static void sync(SolrCore core, List<String> replicas, int nUpdates) {
-    ShardHandlerFactory shardHandlerFactory = core.getCoreDescriptor().getCoreContainer().getShardHandlerFactory();
+    ShardHandlerFactory shardHandlerFactory = core.getCoreContainer().getShardHandlerFactory();
 
     ShardHandler shardHandler = shardHandlerFactory.getShardHandler();