Posted to commits@lucene.apache.org by is...@apache.org on 2019/10/28 22:58:42 UTC

[lucene-solr] branch jira/solr-13662-2 created (now 231b01a)

This is an automated email from the ASF dual-hosted git repository.

ishan pushed a change to branch jira/solr-13662-2
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git.


      at 231b01a  Fix merge conflicts

This branch includes the following new commits:

     new fe77a45  SOLR-13822: Isolated class loading from packages
     new 300d8d2  More tests and made more robust
     new f0133c1  Support for per collection override of package versions
     new 29669e6  precommit errors fixed
     new 3ddc903  SOLR-13662: Package Manager (CLI) first cut
     new 95b8854  Latest auto-update functionality
     new d19d1f1  add a 'refresh' command to force refresh loading of packages
     new df7baa6  Fix latest auto update
     new 3c81f15  Merging master
     new 231b01a  Fix merge conflicts

The 10 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[lucene-solr] 08/10: Fix latest auto update

Posted by is...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ishan pushed a commit to branch jira/solr-13662-2
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit df7baa66448730e614cbd09bdd2969e9c66b6231
Author: Ishan Chattopadhyaya <is...@apache.org>
AuthorDate: Tue Oct 29 03:31:57 2019 +0530

    Fix latest auto update
---
 .../apache/solr/packagemanager/SolrPackageManager.java   | 10 +++++++++-
 solr/core/src/java/org/apache/solr/util/PackageTool.java | 16 +++++++++++-----
 2 files changed, 20 insertions(+), 6 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageManager.java b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageManager.java
index 27fb010..a59feed 100644
--- a/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageManager.java
+++ b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageManager.java
@@ -137,8 +137,11 @@ public class SolrPackageManager implements Closeable {
       }
 
       // Set the package version in the collection's parameters
-      postJson(solrBaseUrl+"/api/collections/abc/config/params", "{set:{PKG_VERSIONS:{"+packageName+" : '"+(pegToLatest? "$LATEST": version)+"'}}}");
+      postJson(solrBaseUrl+"/api/collections/"+collection+"/config/params", "{set:{PKG_VERSIONS:{"+packageName+" : '"+(pegToLatest? "$LATEST": version)+"'}}}");
 
+      String paramsJson = SolrPackageManager.get("http://localhost:8983/api/collections/"+collection+"/config/params?omitHeader=true");
+      System.out.println("Posted param: "+paramsJson);
+      
       // If updating, refresh the package version for this to take effect
       if (isUpdate || pegToLatest) {
         postJson(solrBaseUrl+"/api/cluster/package", "{\"refresh\" : \""+packageName+"\"}");
@@ -160,6 +163,11 @@ public class SolrPackageManager implements Closeable {
         }
       }
 
+      // Set the package version in the collection's parameters
+      postJson(solrBaseUrl+"/api/collections/"+collection+"/config/params", "{update:{PKG_VERSIONS:{'"+packageName+"' : '"+(pegToLatest? "$LATEST": version)+"'}}}");
+      paramsJson = SolrPackageManager.get("http://localhost:8983/api/collections/"+collection+"/config/params?omitHeader=true");
+      System.out.println("Posted param: "+paramsJson);
+
     }
 
     // Verify that package was successfully deployed
diff --git a/solr/core/src/java/org/apache/solr/util/PackageTool.java b/solr/core/src/java/org/apache/solr/util/PackageTool.java
index e12af04..1f7e2d4 100644
--- a/solr/core/src/java/org/apache/solr/util/PackageTool.java
+++ b/solr/core/src/java/org/apache/solr/util/PackageTool.java
@@ -214,7 +214,7 @@ public class PackageTool extends SolrCLI.ToolBase {
       SolrPackageInstance installedPackage = packageManager.getPackage(packageName, "latest");
       System.out.println("Updating ["+packageName+"] from " + installedPackage.getVersion() + " to version "+latestVersion);
 
-      List<String> collectionsDeployedIn = getDeployedCollections(zkHost, packageManager, installedPackage);
+      List<String> collectionsDeployedIn = getDeployedCollections(zkHost, packageManager, installedPackage, true);
       System.out.println("Already deployed on collections: "+collectionsDeployedIn);
       updateManager.updatePackage(packageName, latestVersion);
 
@@ -228,7 +228,7 @@ public class PackageTool extends SolrCLI.ToolBase {
     }
   }
 
-  private List<String> getDeployedCollections(String zkHost, SolrPackageManager packageManager, SolrPackageInstance pkg) {
+  private List<String> getDeployedCollections(String zkHost, SolrPackageManager packageManager, SolrPackageInstance pkg, boolean onlyLatest) {
 
     List<String> allCollections;
     try (SolrZkClient zkClient = new SolrZkClient(zkHost, 30000)) {
@@ -248,10 +248,16 @@ public class PackageTool extends SolrCLI.ToolBase {
       } catch (PathNotFoundException ex) {
        // Don't worry if PKG_VERSIONS wasn't found. It just means this collection was never touched by the package manager.
       }
-      if ("$LATEST".equals(version) && packageManager.verify(pkg, Collections.singletonList(collection))) {
-        deployed.add(collection);
+      if (onlyLatest) {
+        if ("$LATEST".equals(version) && packageManager.verify(pkg, Collections.singletonList(collection))) {
+          deployed.add(collection);
+        } else {
+          System.out.println("Skipping collection: "+collection+", version: "+version);
+        }
       } else {
-        System.out.println("Skipping collection: "+collection+", version: "+version);
+        if (packageManager.verify(pkg, Collections.singletonList(collection))) {
+          deployed.add(collection);
+        }
       }
     }
     return deployed;
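
For reference, a minimal standalone sketch of the JsonPath read that getDeployedCollections performs above, using the Jayway json-path library already among the dependencies; the JSON payload and the collection/package names here are illustrative, not actual tool output:

    import com.jayway.jsonpath.JsonPath;

    public class PkgVersionsReadDemo {
      public static void main(String[] args) {
        // Illustrative shape of /api/collections/<coll>/config/params/PKG_VERSIONS?omitHeader=true
        String paramsJson =
            "{\"response\":{\"params\":{\"PKG_VERSIONS\":{\"question-answer\":\"$LATEST\"}}}}";
        // Same style of path expression as in getDeployedCollections
        String version = JsonPath.parse(paramsJson)
            .read("$['response']['params']['PKG_VERSIONS']['question-answer']");
        System.out.println(version); // prints $LATEST
      }
    }

With onlyLatest=true, a collection counts as deployed only when this read returns the literal marker $LATEST and plugin verification succeeds; with onlyLatest=false, verification alone decides.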


[lucene-solr] 04/10: precommit errors fixed

Posted by is...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ishan pushed a commit to branch jira/solr-13662-2
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 29669e6d5dc2b1f2fcbeb1c88cb785cc9bca62ca
Author: noble <no...@apache.org>
AuthorDate: Thu Oct 17 22:45:42 2019 +1100

    precommit errors fixed
---
 solr/core/src/java/org/apache/solr/pkg/PackageAPI.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
index afa582d..4bd822e 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
@@ -75,7 +75,7 @@ public class PackageAPI {
     try {
       registerListener(zkClient);
     } catch (KeeperException | InterruptedException e) {
-      e.printStackTrace();
+      SolrZkClient.checkInterrupted(e);
     }
   }
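
The change swaps a swallowed stack trace for SolrZkClient.checkInterrupted. A minimal sketch of the pattern this adopts (illustrative, not Solr's actual implementation): when the caught exception is an InterruptedException, the thread's interrupt flag is restored rather than discarded:

    // Hypothetical standalone version of the interrupt-preserving pattern
    static Throwable checkInterrupted(Throwable e) {
      if (e instanceof InterruptedException) {
        // restore the flag so callers up the stack can still observe the interrupt
        Thread.currentThread().interrupt();
      }
      return e;
    }

Swallowing an InterruptedException with printStackTrace() loses the interrupt entirely, which is the kind of forbidden-API usage precommit flags.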
 


[lucene-solr] 03/10: Support for per collection override of package versions

Posted by is...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ishan pushed a commit to branch jira/solr-13662-2
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit f0133c1a823007b4d43a6dc1ea41f5fdfd19f78e
Author: noble <no...@apache.org>
AuthorDate: Thu Oct 17 22:29:48 2019 +1100

    Support for per collection override of package versions
---
 .../java/org/apache/solr/core/RequestParams.java   |  9 +++++
 .../src/java/org/apache/solr/core/SolrCore.java    |  2 +-
 .../java/org/apache/solr/pkg/PackageListeners.java | 28 ++++++++++----
 .../java/org/apache/solr/pkg/PackageLoader.java    | 44 +++++++++++++---------
 .../org/apache/solr/pkg/PackagePluginHolder.java   | 39 +++++++++++++++----
 .../src/test/org/apache/solr/pkg/TestPackages.java | 30 +++++++++++++++
 6 files changed, 119 insertions(+), 33 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/core/RequestParams.java b/solr/core/src/java/org/apache/solr/core/RequestParams.java
index 50a4fd0..d1f7f3d 100644
--- a/solr/core/src/java/org/apache/solr/core/RequestParams.java
+++ b/solr/core/src/java/org/apache/solr/core/RequestParams.java
@@ -250,9 +250,18 @@ public class RequestParams implements MapSerializable {
       return m1;
     }
 
+    /**
+     * @param type one of defaults, appends, invariants
+     */
     public VersionedParams getParams(String type) {
       return paramsMap.get(type);
     }
+
+    /**get the raw map
+     */
+    public Map<String, Object> get() {
+      return defaults;
+    }
   }
 
   public static class VersionedParams extends MapSolrParams {
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 7b6d55e..8315047 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -239,7 +239,7 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
   public volatile boolean indexEnabled = true;
   public volatile boolean readOnly = false;
 
-  private PackageListeners packageListeners = new PackageListeners();
+  private PackageListeners packageListeners = new PackageListeners(this);
 
   public Set<String> getMetricNames() {
     return metricNames;
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java b/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java
index 3ac12ef..3ccd431 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java
@@ -25,12 +25,21 @@ import java.util.Iterator;
 import java.util.List;
 
 import org.apache.solr.core.PluginInfo;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.logging.MDCLoggingContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class PackageListeners {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
+  public static final String PACKAGE_VERSIONS = "PKG_VERSIONS";
+  private SolrCore core;
+
+  public PackageListeners(SolrCore core) {
+    this.core = core;
+  }
+
   // this registry only keeps a weak reference because it does not want to
   // cause a memory leak if the listener forgets to unregister itself
   private List<Reference<Listener>> listeners = new ArrayList<>();
@@ -45,7 +54,7 @@ public class PackageListeners {
     while (it.hasNext()) {
       Reference<Listener> ref = it.next();
       Listener pkgListener = ref.get();
-      if(pkgListener == null || pkgListener == listener){
+      if (pkgListener == null || pkgListener == listener) {
         it.remove();
       }
 
@@ -53,9 +62,15 @@ public class PackageListeners {
 
   }
 
-  synchronized void packagesUpdated(List<PackageLoader.Package> pkgs){
-    for (PackageLoader.Package pkgInfo : pkgs) {
-      invokeListeners(pkgInfo);
+  synchronized void packagesUpdated(List<PackageLoader.Package> pkgs) {
+    if(core != null) MDCLoggingContext.setCore(core);
+    try {
+      for (PackageLoader.Package pkgInfo : pkgs) {
+        invokeListeners(pkgInfo);
+      }
+    } finally {
+      if(core != null) MDCLoggingContext.clear();
+
     }
   }
 
@@ -68,11 +83,11 @@ public class PackageListeners {
     }
   }
 
-  public List<Listener> getListeners(){
+  public List<Listener> getListeners() {
     List<Listener> result = new ArrayList<>();
     for (Reference<Listener> ref : listeners) {
       Listener l = ref.get();
-      if(l != null){
+      if (l != null) {
         result.add(l);
       }
     }
@@ -80,7 +95,6 @@ public class PackageListeners {
   }
 
 
-
   public interface Listener {
     String packageName();
 
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java b/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
index d131647..fb62a5f 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
@@ -41,6 +41,9 @@ import org.apache.solr.core.SolrResourceLoader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * The class that holds a mapping of various packages and classloaders
+ */
 public class PackageLoader {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
@@ -105,7 +108,7 @@ public class PackageLoader {
       List<PackageAPI.PkgVersion> versions = old.packages.get(e.getKey());
       if (versions != null) {
         if (!Objects.equals(e.getValue(), versions)) {
-          log.info("Package {} is modified ",e.getKey());
+          log.info("Package {} is modified ", e.getKey());
           changed.put(e.getKey(), e.getValue());
         }
       } else {
@@ -125,7 +128,9 @@ public class PackageLoader {
 
   }
 
-
+  /**
+   * represents a package definition in the packages.json
+   */
   public class Package {
     final String name;
     final Map<String, Version> myVersions = new ConcurrentHashMap<>();
@@ -134,7 +139,7 @@ public class PackageLoader {
     private boolean deleted;
 
 
-    public Package(String name) {
+    Package(String name) {
       this.name = name;
     }
 
@@ -147,7 +152,7 @@ public class PackageLoader {
       for (PackageAPI.PkgVersion v : modified) {
         Version version = myVersions.get(v.version);
         if (version == null) {
-          log.info("A new version: {} added for package: {} with artifacts {}", v.version,  this.name, v.files);
+          log.info("A new version: {} added for package: {} with artifacts {}", v.version, this.name, v.files);
           myVersions.put(v.version, new Version(this, v));
           sortedVersions.add(v.version);
         }
@@ -159,7 +164,7 @@ public class PackageLoader {
       }
       for (String s : new HashSet<>(myVersions.keySet())) {
         if (!newVersions.contains(s)) {
-          log.info("version: {} is removed from package: {}", s,  this.name);
+          log.info("version: {} is removed from package: {}", s, this.name);
           sortedVersions.remove(s);
           myVersions.remove(s);
         }
@@ -168,8 +173,8 @@ public class PackageLoader {
       sortedVersions.sort(String::compareTo);
       if (sortedVersions.size() > 0) {
         String latest = sortedVersions.get(sortedVersions.size() - 1);
-        if(!latest.equals(this.latest)){
-          log.info("version: {} is the new latest in package: {}", latest,  this.name);
+        if (!latest.equals(this.latest)) {
+          log.info("version: {} is the new latest in package: {}", latest, this.name);
         }
         this.latest = latest;
       } else {
@@ -185,15 +190,10 @@ public class PackageLoader {
     }
 
     public Version getLatest(String lessThan) {
-      String latest = null;
-      for (String v : (ArrayList<String>) new ArrayList(sortedVersions)) {
-        if (v.compareTo(lessThan) < 1) {
-          latest = v;
-        } else break;
-
+      if (lessThan == null) {
+        return getLatest();
       }
-
-
+      String latest = findBiggest(lessThan, new ArrayList(sortedVersions));
       return latest == null ? null : myVersions.get(latest);
     }
 
@@ -221,7 +221,7 @@ public class PackageLoader {
         version.writeMap(ew);
       }
 
-      public Version(Package parent, PackageAPI.PkgVersion v) {
+      Version(Package parent, PackageAPI.PkgVersion v) {
         this.parent = parent;
         this.version = v;
         List<Path> paths = new ArrayList<>();
@@ -231,7 +231,7 @@ public class PackageLoader {
 
         try {
           loader = new SolrResourceLoader(
-              "PACKAGE_LOADER: "+ parent.name()+ ":"+ version,
+              "PACKAGE_LOADER: " + parent.name() + ":" + version,
               paths,
               coreContainer.getResourceLoader().getInstancePath(),
               coreContainer.getResourceLoader().getClassLoader());
@@ -253,4 +253,14 @@ public class PackageLoader {
       }
     }
   }
+
+  private static String findBiggest(String lessThan, List<String> sortedList) {
+    String latest = null;
+    for (String v : sortedList) {
+      if (v.compareTo(lessThan) < 1) {
+        latest = v;
+      } else break;
+    }
+    return latest;
+  }
 }
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java b/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java
index 73e7f90..63facde 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java
@@ -21,6 +21,7 @@ import java.lang.invoke.MethodHandles;
 
 import org.apache.solr.core.PluginBag;
 import org.apache.solr.core.PluginInfo;
+import org.apache.solr.core.RequestParams;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
 import org.slf4j.Logger;
@@ -28,19 +29,21 @@ import org.slf4j.LoggerFactory;
 
 public class PackagePluginHolder<T> extends PluginBag.PluginHolder<T> {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  public static final String LATEST = "$LATEST";
 
   private final SolrCore core;
   private final SolrConfig.SolrPluginInfo pluginMeta;
-  private PackageLoader.Package aPackage;
   private PackageLoader.Package.Version pkgVersion;
+  private PluginInfo info;
 
 
   public PackagePluginHolder(PluginInfo info, SolrCore core, SolrConfig.SolrPluginInfo pluginMeta) {
     super(info);
     this.core = core;
     this.pluginMeta = pluginMeta;
+    this.info = info;
 
-    reload(aPackage = core.getCoreContainer().getPackageLoader().getPackage(info.pkgName));
+    reload(core.getCoreContainer().getPackageLoader().getPackage(info.pkgName));
     core.getPackageListeners().addListener(new PackageListeners.Listener() {
       @Override
       public String packageName() {
@@ -66,18 +69,38 @@ public class PackagePluginHolder<T> extends PluginBag.PluginHolder<T> {
     });
   }
 
+  private String maxVersion() {
+    RequestParams.ParamSet p = core.getSolrConfig().getRequestParams().getParams(PackageListeners.PACKAGE_VERSIONS);
+    if (p == null) {
+      return null;
+    }
+    Object o = p.get().get(info.pkgName);
+    if (o == null || LATEST.equals(o)) return null;
+    return o.toString();
+  }
+
 
   private synchronized void reload(PackageLoader.Package pkg) {
-    if (pkgVersion != null && pkg.getLatest() == pkgVersion) {
-      //I'm already using the latest classloder in the package. nothing to do
+    String lessThan = maxVersion();
+    PackageLoader.Package.Version newest = pkg.getLatest(lessThan);
+    if (newest == null) {
+      log.error("No latest version available for package : {}", pkg.name());
       return;
     }
+    if (lessThan != null) {
+      PackageLoader.Package.Version pkgLatest = pkg.getLatest();
+      if (pkgLatest != newest) {
+        log.info("Using version :{}. latest is {},  params.json has config {} : {}", newest.getVersion(), pkgLatest.getVersion(), pkg.name(), lessThan);
+      }
+    }
 
-    PackageLoader.Package.Version newest = pkg.getLatest();
-    if (newest == null){
-      log.error("No latest version available for package : {}", pkg.name());
-      return;
+    if (pkgVersion != null) {
+      if (newest == pkgVersion) {
+        // I'm already using the latest classloader in the package; nothing to do
+        return;
+      }
     }
+
     log.info("loading plugin: {} -> {} using  package {}:{}",
         pluginInfo.type, pluginInfo.name, pkg.name(), newest.getVersion());
 
diff --git a/solr/core/src/test/org/apache/solr/pkg/TestPackages.java b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
index 6c0c036..92417d4 100644
--- a/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
+++ b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
@@ -33,13 +33,16 @@ import org.apache.solr.client.solrj.impl.BaseHttpSolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.GenericSolrRequest;
+import org.apache.solr.client.solrj.request.RequestWriter;
 import org.apache.solr.client.solrj.request.V2Request;
+import org.apache.solr.client.solrj.util.ClientUtils;
 import org.apache.solr.cloud.ConfigRequest;
 import org.apache.solr.cloud.MiniSolrCloudCluster;
 import org.apache.solr.cloud.SolrCloudTestCase;
 import org.apache.solr.common.MapWriterMap;
 import org.apache.solr.common.NavigableObject;
 import org.apache.solr.common.params.MapSolrParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.filestore.TestDistribPackageStore;
@@ -218,6 +221,7 @@ public class TestPackages extends SolrCloudTestCase {
           COLLECTION_NAME, "requestHandler", "/runtime",
           "mypkg", "2.1" );
 
+      // now remove the highest version, so it will roll back to the next highest one
       delVersion.version = "2.1";
       delete.process(cluster.getSolrClient());
 
@@ -233,6 +237,32 @@ public class TestPackages extends SolrCloudTestCase {
           COLLECTION_NAME, "requestHandler", "/runtime",
           "mypkg", "1.1" );
 
+      ModifiableSolrParams params = new ModifiableSolrParams();
+      params.add("collection", COLLECTION_NAME);
+      new GenericSolrRequest(SolrRequest.METHOD.POST, "/config/params", params ){
+        @Override
+        public RequestWriter.ContentWriter getContentWriter(String expectedType) {
+          return new RequestWriter.StringPayloadContentWriter("{set:{PKG_VERSIONS:{mypkg : '1.1'}}}",
+              ClientUtils.TEXT_JSON);
+        }
+      }
+          .process(cluster.getSolrClient()) ;
+
+      add.version = "2.1";
+      add.files = Arrays.asList(new String[]{FILE3});
+      req.process(cluster.getSolrClient());
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "queryResponseWriter", "json1",
+          "mypkg", "1.1" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "searchComponent", "get",
+          "mypkg", "1.1" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "requestHandler", "/runtime",
+          "mypkg", "1.1" );
 
 
 


[lucene-solr] 06/10: Latest auto-update functionality

Posted by is...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ishan pushed a commit to branch jira/solr-13662-2
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 95b88548e3803f4acb1a373f7b9bfc073b67f149
Author: Ishan Chattopadhyaya <is...@apache.org>
AuthorDate: Mon Oct 28 21:40:56 2019 +0530

    Latest auto-update functionality
---
 .../solr/packagemanager/SolrPackageManager.java    | 55 +++++++++++++++-------
 .../src/java/org/apache/solr/util/PackageTool.java | 38 +++++++++++++--
 .../apache/solr/cloud/PackageManagerCLITest.java   | 39 +++++++++++++--
 3 files changed, 106 insertions(+), 26 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageManager.java b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageManager.java
index e351a68..27fb010 100644
--- a/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageManager.java
+++ b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageManager.java
@@ -98,23 +98,36 @@ public class SolrPackageManager implements Closeable {
     return ret;
   }
 
-  public boolean deployInstallPackage(String packageName, String version, List<String> collections, String overrides[]) {
+  Map<String, String> getPackageParams(String packageName, String collection) {
+    try {
+      return (Map<String, String>)((Map)((Map)((Map)new ObjectMapper().readValue
+          (get(solrBaseUrl + "/api/collections/"+collection+"/config/params/packages"), Map.class).get("response")).get("params")).get("packages")).get(packageName);
+    } catch (IOException e) {
+      throw new PackageManagerException(e);
+    }
+
+  }
+  
+  public boolean deployInstallPackage(String packageName, String version, boolean isUpdate, List<String> collections, String overrides[]) {
+    boolean pegToLatest = "latest".equals(version); // User wants to peg this package's version to the latest installed (for auto-update, i.e. no explicit deploy step)
     SolrPackageInstance pkg = getPackage(packageName, version);
+    if (version == null) {
+      version = pkg.getVersion();
+    }
 
     for (String collection: collections) {
-      Map<String, String> collectionParameterOverrides = new HashMap<String,String>();
+      Map<String, String> collectionParameterOverrides = isUpdate? getPackageParams(packageName, collection): new HashMap<String,String>();
       if (overrides != null) {
         for (String override: overrides) {
           collectionParameterOverrides.put(override.split("=")[0], override.split("=")[1]);
         }
       }
+      
+      // Get package params
       try {
-        // nocommit: it overwrites params of other packages (use set or update)
-        
         boolean packageParamsExist = ((Map)((Map)new ObjectMapper().readValue(
             get(solrBaseUrl + "/api/collections/abc/config/params/packages"), Map.class)
             ).get("response")).containsKey("params");
-        
         postJson(solrBaseUrl + "/api/collections/"+collection+"/config/params",
             new ObjectMapper().writeValueAsString(
                 Map.of(packageParamsExist? "update": "set", 
@@ -123,20 +136,33 @@ public class SolrPackageManager implements Closeable {
         throw new RuntimeException(e);
       }
 
+      // Set the package version in the collection's parameters
+      postJson(solrBaseUrl+"/api/collections/abc/config/params", "{set:{PKG_VERSIONS:{"+packageName+" : '"+(pegToLatest? "$LATEST": version)+"'}}}");
+
+      // If updating, refresh the package version for this to take effect
+      if (isUpdate || pegToLatest) {
+        postJson(solrBaseUrl+"/api/cluster/package", "{\"refresh\" : \""+packageName+"\"}");
+      }
+      
+      // Setup/update all the plugins in the package
       for (Plugin p: pkg.getPlugins()) {
-        System.out.println(p.setupCommand);
+        System.out.println(isUpdate? p.updateCommand: p.setupCommand);
 
         Map<String, String> systemParams = new HashMap<String,String>();
         systemParams.put("collection", collection);
         systemParams.put("package-name", pkg.id);
         systemParams.put("package-version", pkg.version);
 
-        String cmd = resolve(p.setupCommand, pkg.parameterDefaults, collectionParameterOverrides, systemParams);
-        System.out.println("Executing " + cmd + " for collection:" + collection);
-        postJson(solrBaseUrl + "/solr/"+collection+"/config", cmd);
+        String cmd = resolve(isUpdate? p.updateCommand: p.setupCommand, pkg.parameterDefaults, collectionParameterOverrides, systemParams);
+        if (cmd != null && !"".equals(cmd.trim())) {
+          System.out.println("Executing " + cmd + " for collection:" + collection);
+          postJson(solrBaseUrl + "/solr/"+collection+"/config", cmd);
+        }
       }
+
     }
 
+    // Verify that package was successfully deployed
     boolean success = verify(pkg, collections);
     if (success) {
       System.out.println("Deployed and verified package: "+pkg.id+", version: "+pkg.version);
@@ -145,6 +171,7 @@ public class SolrPackageManager implements Closeable {
   }
 
   private String resolve(String str, Map<String, String> defaults, Map<String, String> overrides, Map<String, String> systemParams) {
+    if (str == null) return null;
     for (String param: defaults.keySet()) {
       str = str.replaceAll("\\$\\{"+param+"\\}", overrides.containsKey(param)? overrides.get(param): defaults.get(param));
     }
@@ -164,13 +191,7 @@ public class SolrPackageManager implements Closeable {
       System.out.println(p.verifyCommand);
       for (String collection: collections) {
         System.out.println("Executing " + p.verifyCommand + " for collection:" + collection);
-        Map<String, String> collectionParameterOverrides;
-        try {
-          collectionParameterOverrides = (Map<String, String>)((Map)((Map)((Map)new ObjectMapper().readValue
-              (get(solrBaseUrl + "/api/collections/abc/config/params/packages"), Map.class).get("response")).get("params")).get("packages")).get(pkg.id);
-        } catch (IOException e) {
-          throw new RuntimeException(e);
-        }
+        Map<String, String> collectionParameterOverrides = getPackageParams(pkg.id, collection);
         
         Command cmd = p.verifyCommand;
 
@@ -217,7 +238,7 @@ public class SolrPackageManager implements Closeable {
     return get(url);
   }
 
-  private String get(String url) {
+  public static String get(String url) {
     try (CloseableHttpClient client = SolrUpdateManager.createTrustAllHttpClientBuilder()) {
       HttpGet httpGet = new HttpGet(url);
       httpGet.setHeader("Content-type", "application/json");
diff --git a/solr/core/src/java/org/apache/solr/util/PackageTool.java b/solr/core/src/java/org/apache/solr/util/PackageTool.java
index f5491e6..e12af04 100644
--- a/solr/core/src/java/org/apache/solr/util/PackageTool.java
+++ b/solr/core/src/java/org/apache/solr/util/PackageTool.java
@@ -48,6 +48,8 @@ import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.jayway.jsonpath.JsonPath;
+import com.jayway.jsonpath.PathNotFoundException;
 
 
 public class PackageTool extends SolrCLI.ToolBase {
@@ -64,7 +66,7 @@ public class PackageTool extends SolrCLI.ToolBase {
   @Override
   protected void runImpl(CommandLine cli) throws Exception {
     // Need a logging free, clean output going through to the user.
-    Configurator.setRootLevel(Level.OFF);
+    Configurator.setRootLevel(Level.INFO);
 
     solrUrl = cli.getOptionValues("solrUrl")[cli.getOptionValues("solrUrl").length-1];
     String solrBaseUrl = solrUrl.replaceAll("\\/solr$", ""); // strip out ending "/solr"
@@ -103,7 +105,10 @@ public class PackageTool extends SolrCLI.ToolBase {
               String colls[] = cli.getOptionValues("collections");
               String params[] = cli.getOptionValues("param");
               System.out.println("coll: "+Arrays.toString(colls)+", params: "+Arrays.toString(params));
-              deploy(cli.getArgList().get(1).toString(), colls, params);
+              String packageName = cli.getArgList().get(1).toString().split(":")[0];
+              String version = cli.getArgList().get(1).toString().contains(":")? 
+                  cli.getArgList().get(1).toString().split(":")[1]: null;
+              deploy(packageName, version, cli.hasOption("update"), colls, params);
               break;
             case "redeploy":
               redeploy(cli.getArgList().subList(1, cli.getArgList().size()));
@@ -175,10 +180,10 @@ public class PackageTool extends SolrCLI.ToolBase {
     updateManager.installPackage(args.get(0).toString(), args.get(1).toString());
     System.out.println(args.get(0).toString() + " installed.");
   }
-  protected void deploy(String packageName,
+  protected void deploy(String packageName, String version, boolean isUpdate,
       String collections[], String parameters[]) throws PackageManagerException {
     
-    System.out.println(packageManager.deployInstallPackage(packageName.split(":")[0], packageName.split(":").length==2? packageName.split(":")[1]: "latest",
+    System.out.println(packageManager.deployInstallPackage(packageName, version, isUpdate,
         Arrays.asList(collections), parameters));
   }
 
@@ -234,8 +239,19 @@ public class PackageTool extends SolrCLI.ToolBase {
     System.out.println("Need to verify if these collections have the plugin installed? "+ allCollections);
     List<String> deployed = new ArrayList<String>();
     for (String collection: allCollections) {
-      if (packageManager.verify(pkg, Collections.singletonList(collection))) {
+      // Check package version installed
+      // http://localhost:8983/api/collections/abc/config/params/PKG_VERSIONS?omitHeader=true
+      String paramsJson = SolrPackageManager.get("http://localhost:8983/api/collections/"+collection+"/config/params/PKG_VERSIONS?omitHeader=true");
+      String version = null;
+      try {
+        version = JsonPath.parse(paramsJson).read("$['response'].['params'].['PKG_VERSIONS'].['"+pkg.id+"']");
+      } catch (PathNotFoundException ex) {
+        // Don't worry if PKG_VERSIONS wasn't found. It just means this collection was never touched by the package manager.
+      }
+      if ("$LATEST".equals(version) && packageManager.verify(pkg, Collections.singletonList(collection))) {
         deployed.add(collection);
+      } else {
+        System.out.println("Skipping collection: "+collection+", version: "+version);
       }
     }
     return deployed;
@@ -266,6 +282,18 @@ public class PackageTool extends SolrCLI.ToolBase {
         .withLongOpt("param")
         .create("p"),
 
+        OptionBuilder
+        .isRequired(false)
+        .withDescription("Solr URL scheme: http or https, defaults to http if not specified")
+        .withLongOpt("update")
+        .create("u"),
+
+        OptionBuilder
+        .isRequired(false)
+        .withDescription("Solr URL scheme: http or https, defaults to http if not specified")
+        .withLongOpt("auto-update")
+        .create(),
+        
     };
   }
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/PackageManagerCLITest.java b/solr/core/src/test/org/apache/solr/cloud/PackageManagerCLITest.java
index 99286df..b918d2b 100644
--- a/solr/core/src/test/org/apache/solr/cloud/PackageManagerCLITest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/PackageManagerCLITest.java
@@ -17,6 +17,7 @@
 
 package org.apache.solr.cloud;
 
+import java.lang.invoke.MethodHandles;
 import java.nio.file.Path;
 
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
@@ -24,9 +25,13 @@ import org.apache.solr.util.PackageTool;
 import org.apache.solr.util.SolrCLI;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class PackageManagerCLITest extends SolrCloudTestCase {
 
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
   @BeforeClass
   public static void setupCluster() throws Exception {
     System.setProperty("enable.packages", "true");
@@ -65,16 +70,42 @@ public class PackageManagerCLITest extends SolrCloudTestCase {
     assertEquals("tool should have returned 0 for success ", 0, res);
 
     CollectionAdminRequest
-      .createCollection("abc", "conf1", 2, 1)
+      .createCollection("abc", "conf1", 1, 1)
       .setMaxShardsPerNode(100)
       .process(cluster.getSolrClient());
 
+    CollectionAdminRequest
+    .createCollection("def", "conf1", 1, 1)
+    .setMaxShardsPerNode(100)
+    .process(cluster.getSolrClient());
+
     res = run(tool, new String[] {"-solrUrl", solrUrl, "deploy", "question-answer", "-collections", "abc", "-p", "RH-HANDLER-PATH=/mypath2"});
     assertEquals("tool should have returned 0 for success ", 0, res);
     
-    res = run(tool, new String[] {"-solrUrl", solrUrl, "update", "question-answer"});
-    assertEquals("tool should have returned 0 for success ", 0, res);
-
+    // Randomly decide whether to test the "auto-update to latest" functionality or the default explicit deploy functionality
+    boolean autoUpdateToLatest = random().nextBoolean();
+    
+    if (autoUpdateToLatest) {
+      log.info("Testing auto-update to latest installed");
+      // This command pegs the version to the latest available
+      res = run(tool, new String[] {"-solrUrl", solrUrl, "deploy", "question-answer:latest", "-collections", "abc"});
+      assertEquals("tool should have returned 0 for success ", 0, res);
+      
+      res = run(tool, new String[] {"-solrUrl", solrUrl, "update", "question-answer"});
+      assertEquals("tool should have returned 0 for success ", 0, res);
+    } else {
+      log.info("Testing explicit deployment to a different/newer version");
+
+      res = run(tool, new String[] {"-solrUrl", solrUrl, "update", "question-answer"});
+      assertEquals("tool should have returned 0 for success ", 0, res);
+
+      if (random().nextBoolean()) {
+        res = run(tool, new String[] {"-solrUrl", solrUrl, "deploy", "--update", "question-answer", "-collections", "abc", "-p", "RH-HANDLER-PATH=/mypath2"});
+      } else {
+        res = run(tool, new String[] {"-solrUrl", solrUrl, "deploy", "--update", "question-answer", "-collections", "abc"});
+      }
+      assertEquals("tool should have returned 0 for success ", 0, res);      
+    }
   }
 
   private int run(PackageTool tool, String[] args) throws Exception {
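
The version-pegging step above reduces to a single POST against the collection's params API. A minimal standalone sketch of that call using Apache HttpClient, with a plain default client rather than the tool's trust-all client; the URL, collection, and package names are illustrative:

    import java.nio.charset.StandardCharsets;

    import org.apache.http.client.methods.CloseableHttpResponse;
    import org.apache.http.client.methods.HttpPost;
    import org.apache.http.entity.StringEntity;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.impl.client.HttpClients;
    import org.apache.http.util.EntityUtils;

    public class PegVersionDemo {
      public static void main(String[] args) throws Exception {
        String solrBaseUrl = "http://localhost:8983";  // illustrative
        String collection = "abc";                     // illustrative
        String packageName = "question-answer";        // illustrative
        boolean pegToLatest = true;                    // deploying "pkg:latest" pegs to $LATEST
        String version = "1.0";

        // Same params payload as deployInstallPackage posts above
        String payload = "{set:{PKG_VERSIONS:{'" + packageName + "' : '"
            + (pegToLatest ? "$LATEST" : version) + "'}}}";
        HttpPost post = new HttpPost(solrBaseUrl + "/api/collections/" + collection + "/config/params");
        post.setHeader("Content-type", "application/json");
        post.setEntity(new StringEntity(payload, StandardCharsets.UTF_8));
        try (CloseableHttpClient client = HttpClients.createDefault();
             CloseableHttpResponse rsp = client.execute(post)) {
          System.out.println(EntityUtils.toString(rsp.getEntity()));
        }
      }
    }

When pegToLatest is set, the follow-up {"refresh": "<package>"} POST to /api/cluster/package shown in the diff makes the new $LATEST binding take effect immediately.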


[lucene-solr] 02/10: More tests and made more robust

Posted by is...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ishan pushed a commit to branch jira/solr-13662-2
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 300d8d27374cac859fe8289186bb688f35d158ff
Author: noble <no...@apache.org>
AuthorDate: Wed Oct 16 17:43:33 2019 +1100

    More tests and made more robust
---
 .../src/java/org/apache/solr/api/AnnotatedApi.java |  3 +
 .../src/java/org/apache/solr/core/SolrCore.java    |  3 +-
 .../org/apache/solr/handler/SolrConfigHandler.java | 27 ++++-----
 .../src/java/org/apache/solr/pkg/PackageAPI.java   |  4 ++
 .../java/org/apache/solr/pkg/PackageListeners.java | 21 ++++---
 .../java/org/apache/solr/pkg/PackageLoader.java    | 24 ++++----
 .../org/apache/solr/pkg/PackagePluginHolder.java   | 14 ++++-
 .../src/test/org/apache/solr/pkg/TestPackages.java | 69 ++++++++++++++++++----
 8 files changed, 115 insertions(+), 50 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java
index e9073ae..eca2283 100644
--- a/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java
+++ b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java
@@ -239,10 +239,13 @@ public class AnnotatedApi extends Api implements PermissionNameProvider {
 
 
       } catch (SolrException se) {
+        log.error("Error executing command  ", se);
         throw se;
       } catch (InvocationTargetException ite) {
+        log.error("Error executing command ", ite);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, ite.getCause());
       } catch (Exception e) {
+        log.error("Error executing command : ", e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
       }
 
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 59c9a7a..7b6d55e 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -287,7 +287,8 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
       return resourceLoader;
     }
     PackageLoader.Package aPackage = coreContainer.getPackageLoader().getPackage(pkg);
-    return aPackage.getLatest().getLoader();
+    PackageLoader.Package.Version latest = aPackage.getLatest();
+    return latest.getLoader();
   }
 
   /**
diff --git a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
index 2085221..03b9600 100644
--- a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
@@ -151,7 +151,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
 
   public static boolean getImmutable(SolrCore core) {
     NamedList configSetProperties = core.getConfigSetProperties();
-    if(configSetProperties == null) return false;
+    if (configSetProperties == null) return false;
     Object immutable = configSetProperties.get(IMMUTABLE_CONFIGSET_ARG);
     return immutable != null ? Boolean.parseBoolean(immutable.toString()) : false;
   }
@@ -248,11 +248,12 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
               if (map != null) {
                 Object o = map.get(componentName);
                 val.put(parts.get(1), makeMap(componentName, o));
-                if(req.getParams().getBool("meta", false)){
+                if (req.getParams().getBool("meta", false)) {
+                  List<PackageListeners.Listener> listeners = req.getCore().getPackageListeners().getListeners();
                   for (PackageListeners.Listener listener :
-                      req.getCore().getPackageListeners().getListeners()) {
+                      listeners) {
                     PluginInfo info = listener.pluginInfo();
-                    if(info.type.equals(parts.get(1)) && info.name.equals(componentName)){
+                    if (info.type.equals(parts.get(1)) && info.name.equals(componentName)) {
                       if (o instanceof Map) {
                         Map m1 = (Map) o;
                         m1.put("_packageinfo_", listener.getPackageVersion());
@@ -261,9 +262,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
                   }
                 }
               }
-
             }
-
             resp.add("config", val);
           }
         }
@@ -440,7 +439,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
 
       List errs = CommandOperation.captureErrors(ops);
       if (!errs.isEmpty()) {
-        throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST,"error processing params", errs);
+        throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "error processing params", errs);
       }
 
       SolrResourceLoader loader = req.getCore().getResourceLoader();
@@ -503,8 +502,8 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
       }
       List errs = CommandOperation.captureErrors(ops);
       if (!errs.isEmpty()) {
-        log.error("ERRROR:" +Utils.toJSONString(errs));
-        throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST,"error processing commands", errs);
+        log.error("ERROR:" + Utils.toJSONString(errs));
+        throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "error processing commands", errs);
       }
 
       SolrResourceLoader loader = req.getCore().getResourceLoader();
@@ -542,8 +541,8 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
       op.getMap(PluginInfo.INVARIANTS, null);
       op.getMap(PluginInfo.APPENDS, null);
       if (op.hasError()) return overlay;
-      if(info.clazz == PluginBag.RuntimeLib.class) {
-        if(!PluginBag.RuntimeLib.isEnabled()){
+      if (info.clazz == PluginBag.RuntimeLib.class) {
+        if (!PluginBag.RuntimeLib.isEnabled()) {
           op.addError("Solr not started with -Denable.runtime.lib=true");
           return overlay;
         }
@@ -575,7 +574,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
 
     private boolean pluginExists(SolrConfig.SolrPluginInfo info, ConfigOverlay overlay, String name) {
       List<PluginInfo> l = req.getCore().getSolrConfig().getPluginInfos(info.clazz.getName());
-      for (PluginInfo pluginInfo : l) if(name.equals( pluginInfo.name)) return true;
+      for (PluginInfo pluginInfo : l) if (name.equals(pluginInfo.name)) return true;
       return overlay.getNamedPlugins(info.getCleanTag()).containsKey(name);
     }
 
@@ -586,7 +585,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
         try {
           req.getCore().createInitInstance(new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap()), expected, clz, "");
         } catch (Exception e) {
-          log.error("Error checking plugin : ",e);
+          log.error("Error checking plugin : ", e);
           op.addError(e.getMessage());
           return false;
         }
@@ -692,7 +691,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
           c == '_' ||
           c == '-' ||
           c == '.'
-          ) continue;
+      ) continue;
       else {
         return formatString("''{0}'' name should only have chars [a-zA-Z_-.0-9] ", s);
       }
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
index 0267f37..afa582d 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
@@ -310,9 +310,13 @@ public class PackageAPI {
     }
 
     private void syncToVersion(int expectedVersion) {
+      int origVersion = pkgs.znodeVersion;
       for (int i = 0; i < 10; i++) {
         log.debug("my version is {} , and expected version {}", pkgs.znodeVersion, expectedVersion);
         if (pkgs.znodeVersion >= expectedVersion) {
+          if(origVersion < pkgs.znodeVersion){
+            packageLoader.refreshPackageConf();
+          }
           return;
         }
         try {
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java b/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java
index c6ebae5..3ac12ef 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java
@@ -17,27 +17,33 @@
 
 package org.apache.solr.pkg;
 
-import java.lang.ref.WeakReference;
+import java.lang.invoke.MethodHandles;
+import java.lang.ref.Reference;
+import java.lang.ref.SoftReference;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
 import org.apache.solr.core.PluginInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class PackageListeners {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
   // this registry only keeps a weak reference because it does not want to
   // cause a memory leak if the listener forgets to unregister itself
-  private List<WeakReference<Listener>> listeners = new ArrayList<>();
+  private List<Reference<Listener>> listeners = new ArrayList<>();
 
   public synchronized void addListener(Listener listener) {
-    listeners.add(new WeakReference<>(listener));
+    listeners.add(new SoftReference<>(listener));
 
   }
 
   public synchronized void removeListener(Listener listener) {
-    Iterator<WeakReference<Listener>> it = listeners.iterator();
+    Iterator<Reference<Listener>> it = listeners.iterator();
     while (it.hasNext()) {
-      WeakReference<Listener> ref = it.next();
+      Reference<Listener> ref = it.next();
       Listener pkgListener = ref.get();
       if(pkgListener == null || pkgListener == listener){
         it.remove();
@@ -54,7 +60,7 @@ public class PackageListeners {
   }
 
   private synchronized void invokeListeners(PackageLoader.Package pkg) {
-    for (WeakReference<Listener> ref : listeners) {
+    for (Reference<Listener> ref : listeners) {
       Listener listener = ref.get();
       if (listener != null && listener.packageName().equals(pkg.name())) {
         listener.changed(pkg);
@@ -64,12 +70,11 @@ public class PackageListeners {
 
   public List<Listener> getListeners(){
     List<Listener> result = new ArrayList<>();
-    for (WeakReference<Listener> ref : listeners) {
+    for (Reference<Listener> ref : listeners) {
       Listener l = ref.get();
       if(l != null){
         result.add(l);
       }
-
     }
     return result;
   }
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java b/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
index 7efcc88..d131647 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
@@ -76,6 +76,7 @@ public class PackageLoader {
 
     List<Package> updated = new ArrayList<>();
     Map<String, List<PackageAPI.PkgVersion>> modified = getModified(myCopy, packageAPI.pkgs);
+
     for (Map.Entry<String, List<PackageAPI.PkgVersion>> e : modified.entrySet()) {
       if (e.getValue() != null) {
         Package p = packageClassLoaders.get(e.getKey());
@@ -95,7 +96,6 @@ public class PackageLoader {
     }
     for (SolrCore core : coreContainer.getCores()) {
       core.getPackageListeners().packagesUpdated(updated);
-
     }
   }
 
@@ -105,15 +105,18 @@ public class PackageLoader {
       List<PackageAPI.PkgVersion> versions = old.packages.get(e.getKey());
       if (versions != null) {
         if (!Objects.equals(e.getValue(), versions)) {
+          log.info("Package {} is modified ",e.getKey());
           changed.put(e.getKey(), e.getValue());
         }
       } else {
+        log.info("A new package: {} introduced", e.getKey());
         changed.put(e.getKey(), e.getValue());
       }
     }
     //some packages are deleted altogether
     for (String s : old.packages.keySet()) {
       if (!newPkgs.packages.keySet().contains(s)) {
+        log.info("Package: {} is removed althogether", s);
         changed.put(s, null);
       }
     }
@@ -122,10 +125,6 @@ public class PackageLoader {
 
   }
 
-  public SolrResourceLoader getResourceLoader(String pkg, String version) {
-    return null;
-  }
-
 
   public class Package {
     final String name;
@@ -148,6 +147,7 @@ public class PackageLoader {
       for (PackageAPI.PkgVersion v : modified) {
         Version version = myVersions.get(v.version);
         if (version == null) {
+          log.info("A new version: {} added for package: {} with artifacts {}", v.version,  this.name, v.files);
           myVersions.put(v.version, new Version(this, v));
           sortedVersions.add(v.version);
         }
@@ -159,6 +159,7 @@ public class PackageLoader {
       }
       for (String s : new HashSet<>(myVersions.keySet())) {
         if (!newVersions.contains(s)) {
+          log.info("version: {} is removed from package: {}", s,  this.name);
           sortedVersions.remove(s);
           myVersions.remove(s);
         }
@@ -166,8 +167,13 @@ public class PackageLoader {
 
       sortedVersions.sort(String::compareTo);
       if (sortedVersions.size() > 0) {
-        latest = sortedVersions.get(sortedVersions.size() - 1);
+        String latest = sortedVersions.get(sortedVersions.size() - 1);
+        if(!latest.equals(this.latest)){
+          log.info("version: {} is the new latest in package: {}", latest,  this.name);
+        }
+        this.latest = latest;
       } else {
+        log.error("latest version:  null");
         latest = null;
       }
 
@@ -225,7 +231,7 @@ public class PackageLoader {
 
         try {
           loader = new SolrResourceLoader(
-              "PACKAGE_LOADER:"+ parent.name()+ ":"+ version,
+              "PACKAGE_LOADER: "+ parent.name()+ ":"+ version,
               paths,
               coreContainer.getResourceLoader().getInstancePath(),
               coreContainer.getResourceLoader().getClassLoader());
@@ -244,11 +250,7 @@ public class PackageLoader {
 
       public SolrResourceLoader getLoader() {
         return loader;
-
       }
-
     }
   }
-
-
 }
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java b/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java
index f0364c4..73e7f90 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java
@@ -68,11 +68,19 @@ public class PackagePluginHolder<T> extends PluginBag.PluginHolder<T> {
 
 
   private synchronized void reload(PackageLoader.Package pkg) {
-    if(pkgVersion != null && aPackage.getLatest() == pkgVersion ) return;
+    if (pkgVersion != null && pkg.getLatest() == pkgVersion) {
+      // I'm already using the latest classloader in the package; nothing to do
+      return;
+    }
 
-    if (inst != null) log.info("reloading plugin {} ", pluginInfo.name);
     PackageLoader.Package.Version newest = pkg.getLatest();
-    if(newest == null) return;
+    if (newest == null){
+      log.error("No latest version available for package : {}", pkg.name());
+      return;
+    }
+    log.info("loading plugin: {} -> {} using  package {}:{}",
+        pluginInfo.type, pluginInfo.name, pkg.name(), newest.getVersion());
+
     Object instance = SolrCore.createInstance(pluginInfo.className,
         pluginMeta.clazz, pluginMeta.getCleanTag(), core, newest.getLoader());
     PluginBag.initInstance(instance, pluginInfo);
diff --git a/solr/core/src/test/org/apache/solr/pkg/TestPackages.java b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
index 5de69cf..6c0c036 100644
--- a/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
+++ b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
@@ -68,6 +68,7 @@ public class TestPackages extends SolrCloudTestCase {
     try {
       String FILE1 = "/mypkg/runtimelibs.jar";
       String FILE2 = "/mypkg/runtimelibs_v2.jar";
+      String FILE3 = "/mypkg/runtimelibs_v3.jar";
       String COLLECTION_NAME = "testPluginLoadingColl";
       byte[] derFile = readFile("cryptokeys/pub_key512.der");
       cluster.getZkClient().makePath("/keys/exe", true);
@@ -149,7 +150,6 @@ public class TestPackages extends SolrCloudTestCase {
 
       //add the version using package API
       add.version = "1.1";
-      add.pkg = "mypkg";
       add.files = Arrays.asList(new String[]{FILE2});
       req.process(cluster.getSolrClient());
 
@@ -165,34 +165,77 @@ public class TestPackages extends SolrCloudTestCase {
           COLLECTION_NAME, "requestHandler", "/runtime",
           "mypkg", "1.1" );
 
-      /*executeReq( "/" + COLLECTION_NAME + "/get?wt=json", cluster.getRandomJetty(random()),
+      executeReq( "/" + COLLECTION_NAME + "/get?wt=json", cluster.getRandomJetty(random()),
           Utils.JSONCONSUMER,
-          Utils.makeMap("class", "org.apache.solr.core.RuntimeLibSearchComponent",
-              "Version","2"));
+          Utils.makeMap("Version", "2"));
+
+
+      //now upload the third jar
+      postFileAndWait(cluster, "runtimecode/runtimelibs_v3.jar.bin", FILE3,
+          "a400n4T7FT+2gM0SC6+MfSOExjud8MkhTSFylhvwNjtWwUgKdPFn434Wv7Qc4QEqDVLhQoL3WqYtQmLPti0G4Q==");
+
+      add.version = "2.1";
+      add.files = Arrays.asList(new String[]{FILE3});
+      req.process(cluster.getSolrClient());
+
+      //now let's verify that the classes are updated
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "queryResponseWriter", "json1",
+          "mypkg", "2.1" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "searchComponent", "get",
+          "mypkg", "2.1" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "requestHandler", "/runtime",
+          "mypkg", "2.1" );
+
+      executeReq( "/" + COLLECTION_NAME + "/runtime?wt=json", cluster.getRandomJetty(random()),
+          Utils.JSONCONSUMER,
+          Utils.makeMap("Version","2"));
+
 
       PackageAPI.DelVersion delVersion = new PackageAPI.DelVersion();
       delVersion.pkg = "mypkg";
-      delVersion.version = "1.1";
-      new V2Request.Builder("/cluster/package")
+      delVersion.version = "1.0";
+      V2Request delete = new V2Request.Builder("/cluster/package")
           .withMethod(SolrRequest.METHOD.POST)
           .forceV2(true)
-          .withPayload(delVersion)
-          .build()
-          .process(cluster.getSolrClient());
+          .withPayload(Collections.singletonMap("delete", delVersion))
+          .build();
+      delete.process(cluster.getSolrClient());
 
       verifyCmponent(cluster.getSolrClient(),
           COLLECTION_NAME, "queryResponseWriter", "json1",
-          "mypkg", "1.0" );
+          "mypkg", "2.1" );
 
       verifyCmponent(cluster.getSolrClient(),
           COLLECTION_NAME, "searchComponent", "get",
-          "mypkg", "1.0" );
+          "mypkg", "2.1" );
 
       verifyCmponent(cluster.getSolrClient(),
           COLLECTION_NAME, "requestHandler", "/runtime",
-          "mypkg", "1.0" );
+          "mypkg", "2.1" );
+
+      delVersion.version = "2.1";
+      delete.process(cluster.getSolrClient());
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "queryResponseWriter", "json1",
+          "mypkg", "1.1" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "searchComponent", "get",
+          "mypkg", "1.1" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "requestHandler", "/runtime",
+          "mypkg", "1.1" );
+
+
+
 
-*/
     } finally {
       cluster.shutdown();
     }
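For reference, the change above stops posting the bare DelVersion object and wraps
it in an explicit command map. Assuming DelVersion serializes only the two fields
set in this test, the body posted to /api/cluster/package would look roughly like:

    {"delete": {"pkg": "mypkg", "version": "1.0"}}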


[lucene-solr] 05/10: SOLR-13662: Package Manager (CLI) first cut

Posted by is...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ishan pushed a commit to branch jira/solr-13662-2
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 3ddc903b9ad5348733a01023be4d8ecb34d0d5f9
Author: Ishan Chattopadhyaya <is...@apache.org>
AuthorDate: Sat Oct 19 23:15:24 2019 +0530

    SOLR-13662: Package Manager (CLI) first cut
---
 lucene/ivy-versions.properties                     |   1 +
 solr/bin/solr                                      |  58 +++
 solr/core/ivy.xml                                  |   5 +
 .../apache/solr/packagemanager/SolrPackage.java    | 109 +++++
 .../solr/packagemanager/SolrPackageInstance.java   |  42 ++
 .../solr/packagemanager/SolrPackageManager.java    | 303 +++++++++++++
 .../solr/packagemanager/SolrPackageRepository.java | 107 +++++
 .../solr/packagemanager/SolrUpdateManager.java     | 501 +++++++++++++++++++++
 .../solr/packagemanager/pf4j/BasicVerifier.java    |  40 ++
 .../solr/packagemanager/pf4j/CompoundVerifier.java |  63 +++
 .../packagemanager/pf4j/DefaultVersionManager.java |  43 ++
 .../solr/packagemanager/pf4j/FileDownloader.java   |  39 ++
 .../solr/packagemanager/pf4j/FileVerifier.java     |  63 +++
 .../pf4j/PackageManagerException.java              |  50 ++
 .../packagemanager/pf4j/Sha512SumVerifier.java     |  83 ++++
 .../packagemanager/pf4j/SimpleFileDownloader.java  | 151 +++++++
 .../solr/packagemanager/pf4j/StringUtils.java      |  56 +++
 .../solr/packagemanager/pf4j/VerifyException.java  |  33 ++
 .../src/java/org/apache/solr/pkg/PackageAPI.java   |  13 +-
 .../src/java/org/apache/solr/util/PackageTool.java | 302 +++++++++++++
 .../src/java/org/apache/solr/util/SolrCLI.java     |   2 +
 .../apache/solr/cloud/PackageManagerCLITest.java   |  84 ++++
 22 files changed, 2147 insertions(+), 1 deletion(-)

diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties
index 86cf9e2..729e3a5 100644
--- a/lucene/ivy-versions.properties
+++ b/lucene/ivy-versions.properties
@@ -36,6 +36,7 @@ com.fasterxml.jackson.core.version = 2.9.9
 /com.ibm.icu/icu4j = 62.1
 /com.jayway.jsonpath/json-path = 2.4.0
 /com.lmax/disruptor = 3.4.2
+/org.pf4j/pf4j-update = 2.1.0
 /com.pff/java-libpst = 0.8.1
 
 com.rometools.version = 1.5.1
diff --git a/solr/bin/solr b/solr/bin/solr
index 596242f..cb82610 100755
--- a/solr/bin/solr
+++ b/solr/bin/solr
@@ -764,6 +764,59 @@ function get_info() {
   return $CODE
 } # end get_info
 
+function run_package() {
+  runningSolrUrl=""
+
+  numSolrs=`find "$SOLR_PID_DIR" -name "solr-*.pid" -type f | wc -l | tr -d ' '`
+  if [ "$numSolrs" != "0" ]; then
+    echo -e "\nFound $numSolrs Solr nodes: "
+    while read PIDF
+      do
+        ID=`cat "$PIDF"`
+        port=`jetty_port "$ID"`
+        if [ "$port" != "" ]; then
+          echo -e "\nSolr process $ID running on port $port"
+          #run_tool status -solr "$SOLR_URL_SCHEME://$SOLR_TOOL_HOST:$port/solr"
+          runningSolrUrl="$SOLR_URL_SCHEME://$SOLR_TOOL_HOST:$port/solr"
+          break
+          CODE=$?
+          echo ""
+        else
+          echo -e "\nSolr process $ID from $PIDF not found."
+          CODE=1
+        fi
+    done < <(find "$SOLR_PID_DIR" -name "solr-*.pid" -type f)
+  else
+    # no pid files but check using ps just to be sure
+    numSolrs=`ps auxww | grep start\.jar | grep solr\.solr\.home | grep -v grep | wc -l | sed -e 's/^[ \t]*//'`
+    if [ "$numSolrs" != "0" ]; then
+      echo -e "\nFound $numSolrs Solr nodes: "
+      PROCESSES=$(ps auxww | grep start\.jar | grep solr\.solr\.home | grep -v grep | awk '{print $2}' | sort -r)
+      for ID in $PROCESSES
+        do
+          port=`jetty_port "$ID"`
+          if [ "$port" != "" ]; then
+            echo ""
+            echo "Solr process $ID running on port $port"
+            runningSolrUrl="$SOLR_URL_SCHEME://$SOLR_TOOL_HOST:$port/solr"
+            break
+            CODE=$?
+            echo ""
+          fi
+      done
+    else
+      echo -e "\nNo Solr nodes are running.\n"
+      exit 1
+      CODE=3
+    fi
+  fi
+
+  echo "Solr Base URL is $runningSolrUrl"
+  echo "Params: $@"
+  run_tool package -solrUrl "$runningSolrUrl" $@
+  #exit $?
+}
+
 # tries to gracefully stop Solr using the Jetty
 # stop command and if that fails, then uses kill -9
 function stop_solr() {
@@ -1359,6 +1412,11 @@ if [[ "$SCRIPT_CMD" == "export" ]]; then
   exit $?
 fi
 
+if [[ "$SCRIPT_CMD" == "package" ]]; then
+  run_package $@
+  exit $?
+fi
+
 if [[ "$SCRIPT_CMD" == "auth" ]]; then
 
   VERBOSE=""
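A quick sketch of how the new subcommand is expected to be driven; the "list"
argument is purely illustrative (the real subcommands are parsed by PackageTool,
added elsewhere in this commit):

    # run_package locates a live node via its pid file and forwards all arguments:
    bin/solr package list
    # ...which effectively runs:
    #   run_tool package -solrUrl "http://localhost:8983/solr" list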
diff --git a/solr/core/ivy.xml b/solr/core/ivy.xml
index 9fba663..4dab1df 100644
--- a/solr/core/ivy.xml
+++ b/solr/core/ivy.xml
@@ -138,6 +138,11 @@
     <dependency org="com.google.protobuf" name="protobuf-java" rev="${/com.google.protobuf/protobuf-java}" conf="compile"/>
     <dependency org="com.jayway.jsonpath" name="json-path" rev="${/com.jayway.jsonpath/json-path}" conf="compile"/>
 
+    <dependency org="com.github.zafarkhaja" name="java-semver" rev="0.9.0" conf="compile"/>
+    <dependency org="net.minidev" name="json-smart" rev="2.2.1" conf="compile"/>
+    <dependency org="net.minidev" name="asm" rev="1.0.2" conf="compile"/>
+
+
     <dependency org="org.rrd4j" name="rrd4j" rev="${/org.rrd4j/rrd4j}" conf="compile"/>
 
     <!-- JWT Auth plugin -->
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/SolrPackage.java b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackage.java
new file mode 100644
index 0000000..8a48728
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackage.java
@@ -0,0 +1,109 @@
+package org.apache.solr.packagemanager;
+
+
+import java.io.Serializable;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * Describes a package (along with all released versions) as it appears in a repository.
+ */
+public class SolrPackage implements Serializable, Comparable<SolrPackage> {
+
+  public String id;
+  public String description;
+  public List<SolrPackageRelease> versions;
+
+  private String repositoryId;
+
+  public static class SolrPackageRelease {
+
+    public String version;
+    public Date date;
+    public String requires;
+    public String url;
+
+    public String sha512sum;
+    public String sig;
+
+    public Metadata metadata;
+    @Override
+    public String toString() {
+      return "SolrPackageRelease{" +
+          "version='" + version + '\'' +
+          ", date=" + date +
+          ", requires='" + requires + '\'' +
+          ", url='" + url + '\'' +
+          ", sig='" + sig + '\'' +
+          ", min='" + metadata.minSolrVersion + '\'' +
+          ", max='" + metadata.maxSolrVersion + '\'' +
+          ", dependencies='" + metadata.dependencies + '\'' +
+          ", plugins='" + metadata.plugins + '\'' +
+          ", paramDefaults='" + metadata.parameterDefaults + '\'' +
+          ", sha512sum='" + sha512sum + '\'' +
+          '}';
+    }
+  }
+
+  public static class Metadata {
+    @JsonProperty("min-solr-version")
+    public String minSolrVersion;
+    @JsonProperty("max-solr-version")
+    public String maxSolrVersion;
+
+    public List<String> dependencies;
+    public List<Plugin> plugins;
+    @JsonProperty("parameter-defaults")
+    public Map<String, String> parameterDefaults;
+  }
+
+  public static class Plugin {
+    public String id;
+    @JsonProperty("setup-command")
+    public String setupCommand;
+
+    @JsonProperty("update-command")
+    public String updateCommand;
+
+    @JsonProperty("uninstall-command")
+    public String uninstallCommand;
+
+    @JsonProperty("verify-command")
+    public Command verifyCommand;
+
+    @Override
+    public String toString() {
+      return id + ": {setup: "+setupCommand+", update: "+updateCommand+", uninstall: "+uninstallCommand+", verify: "+verifyCommand+"}";
+    }
+  }
+
+  @Override
+  public int compareTo(SolrPackage o) {
+    return id.compareTo(o.id);
+  }
+
+  public String getRepositoryId() {
+    return repositoryId;
+  }
+
+  public void setRepositoryId(String repositoryId) {
+    this.repositoryId = repositoryId;
+  }
+
+  public static class Command {
+    public String path;
+    public String method;
+    public Map payload;
+    public String condition;
+    public String expected;
+    
+    @Override
+      public String toString() {
+        return method + " " + path + ", Payload: "+ payload+", Condition: "+condition+", expected: "+expected;
+      }
+  }
+}
+
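A minimal sketch of the JSON shape this class is meant to deserialize; field names
follow the @JsonProperty annotations above, and all values here are invented for
illustration:

    {
      "id": "mypkg",
      "description": "An example package",
      "versions": [{
        "version": "1.0.0",
        "url": "mypkg-1.0.0.jar",
        "sig": "<base64 signature>",
        "sha512sum": "<sha512 of the jar>",
        "metadata": {
          "min-solr-version": "8.3.0",
          "max-solr-version": "9.0.0",
          "parameter-defaults": {"RH-PATH": "/mypath"},
          "plugins": [{
            "id": "my-handler",
            "setup-command": "{add-requesthandler: {name: '${RH-PATH}', class: 'my.Handler'}}",
            "verify-command": {
              "path": "/solr/${collection}/config/requestHandler",
              "method": "GET",
              "condition": "$.config.requestHandler['${RH-PATH}'].class",
              "expected": "my.Handler"
            }
          }]
        }
      }]
    }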
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageInstance.java b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageInstance.java
new file mode 100644
index 0000000..26f3774
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageInstance.java
@@ -0,0 +1,42 @@
+package org.apache.solr.packagemanager;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.packagemanager.SolrPackage.Plugin;
+
+/**
+ * Describes one instance of a package as it exists in Solr when installed.
+ */
+public class SolrPackageInstance {
+	final public String id;
+	final public String description;
+	final public String version;
+	final public List<Plugin> plugins;
+	final Map<String, String> parameterDefaults;
+	
+	public SolrPackageInstance(String id, String description, String version,
+	    List<Plugin> plugins, Map<String, String> params) {
+		this.id = id;
+		this.description = description;
+		this.version = version;
+		this.plugins = plugins;
+		this.parameterDefaults = params;
+	}
+
+	public String getPluginId() {
+		return id;
+	}
+
+	public String getPluginDescription() {
+		return description;
+	}
+
+	public String getVersion() {
+		return version;
+	}
+
+	public List<Plugin> getPlugins() {
+    return plugins;
+  }
+}
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageManager.java b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageManager.java
new file mode 100644
index 0000000..e351a68
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageManager.java
@@ -0,0 +1,303 @@
+package org.apache.solr.packagemanager;
+
+import java.io.Closeable;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.StringWriter;
+import java.net.MalformedURLException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.http.HttpEntity;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.packagemanager.SolrPackage.Command;
+import org.apache.solr.packagemanager.SolrPackage.Metadata;
+import org.apache.solr.packagemanager.SolrPackage.Plugin;
+import org.apache.solr.packagemanager.pf4j.DefaultVersionManager;
+import org.apache.solr.packagemanager.pf4j.PackageManagerException;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.jayway.jsonpath.JsonPath;
+
+public class SolrPackageManager implements Closeable {
+
+  final DefaultVersionManager versionManager;
+
+  final String solrBaseUrl;
+  
+  final SolrZkClient zkClient;
+  public SolrPackageManager(File repo, String solrBaseUrl, String zkHost) {
+    versionManager = new DefaultVersionManager();
+    this.solrBaseUrl = solrBaseUrl;
+    this.zkClient = new SolrZkClient(zkHost, 30000);
+    System.out.println("Done initializing a zkClient instance...");
+  }
+
+  Map<String, List<SolrPackageInstance>> packages = null;
+
+  Metadata fetchMetadata(String manifestFilePath) throws MalformedURLException, IOException {
+    String metadataJson = getStringFromStream(solrBaseUrl + "/api/node/files"+manifestFilePath);
+    System.out.println("Fetched metadata blob: "+metadataJson);
+    Metadata metadata = new ObjectMapper().readValue(metadataJson, Metadata.class);
+    System.out.println("Now metadata: "+metadata);
+    return metadata;
+  }
+
+  public List<SolrPackageInstance> getPackages() throws PackageManagerException {
+    System.out.println("Getting packages from clusterprops...");
+    List<SolrPackageInstance> ret = new ArrayList<SolrPackageInstance>();
+    packages = new HashMap<String, List<SolrPackageInstance>>();
+    try {
+      /*String clusterPropsZnode = IOUtils.toString(new URL(solrBaseUrl + "/solr/admin/zookeeper?detail=true&path=/clusterprops.json&wt=json").openStream(), "UTF-8");
+      String clusterPropsJson = ((Map)new ObjectMapper().readValue(clusterPropsZnode, Map.class).get("znode")).get("data").toString();
+      Map packagesJson = (Map)new ObjectMapper().readValue(clusterPropsJson, Map.class).get("packages");*/
+      
+      String clusterPropsJson = null;
+      Map packagesJson = null;
+      
+      if (zkClient.exists("/packages.json", true) == true) {
+        clusterPropsJson = new String(zkClient.getData("/packages.json", null, null, true), "UTF-8");
+        System.out.println("clusterprops are: "+clusterPropsJson);
+        packagesJson = (Map)new ObjectMapper().
+            configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true).readValue(clusterPropsJson, Map.class).get("packages");
+      }
+
+      if (packagesJson != null) {
+        for (Object packageName: packagesJson.keySet()) {
+          List pkg = (List)packagesJson.get(packageName);
+          for (Map pkgVersion: (List<Map>)pkg) {
+            System.out.println("List mein yeh aaya hai: "+pkg); // nocommit don't blindly get .get(0)
+            Metadata metadata = fetchMetadata(pkgVersion.get("manifest").toString());
+            List<Plugin> solrplugins = metadata.plugins;
+            SolrPackageInstance pkgInstance = new SolrPackageInstance(packageName.toString(), null, 
+                pkgVersion.get("version").toString(), solrplugins, metadata.parameterDefaults);
+            List<SolrPackageInstance> list = packages.containsKey(packageName)? packages.get(packageName): new ArrayList<SolrPackageInstance>();
+            list.add(pkgInstance);
+            packages.put(packageName.toString(), list);
+            ret.add(pkgInstance);
+          }
+        }
+      }
+    } catch (Exception e) {
+      e.printStackTrace();
+      if (packages == null) packages = Collections.emptyMap(); // nocommit can't happen
+      throw new PackageManagerException(e);
+    }
+    System.out.println("Got packages: "+ret);
+    return ret;
+  }
+
+  public boolean deployInstallPackage(String packageName, String version, List<String> collections, String overrides[]) {
+    SolrPackageInstance pkg = getPackage(packageName, version);
+
+    for (String collection: collections) {
+      Map<String, String> collectionParameterOverrides = new HashMap<String,String>();
+      if (overrides != null) {
+        for (String override: overrides) {
+          collectionParameterOverrides.put(override.split("=")[0], override.split("=")[1]);
+        }
+      }
+      try {
+        // nocommit: it overwrites params of other packages (use set or update)
+        
+        boolean packageParamsExist = ((Map)((Map)new ObjectMapper().readValue(
+            get(solrBaseUrl + "/api/collections/abc/config/params/packages"), Map.class)
+            ).get("response")).containsKey("params");
+        
+        postJson(solrBaseUrl + "/api/collections/"+collection+"/config/params",
+            new ObjectMapper().writeValueAsString(
+                Map.of(packageParamsExist? "update": "set", 
+                    Map.of("packages", Map.of(packageName, collectionParameterOverrides)))));
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+
+      for (Plugin p: pkg.getPlugins()) {
+        System.out.println(p.setupCommand);
+
+        Map<String, String> systemParams = new HashMap<String,String>();
+        systemParams.put("collection", collection);
+        systemParams.put("package-name", pkg.id);
+        systemParams.put("package-version", pkg.version);
+
+        String cmd = resolve(p.setupCommand, pkg.parameterDefaults, collectionParameterOverrides, systemParams);
+        System.out.println("Executing " + cmd + " for collection:" + collection);
+        postJson(solrBaseUrl + "/solr/"+collection+"/config", cmd);
+      }
+    }
+
+    boolean success = verify(pkg, collections);
+    if (success) {
+      System.out.println("Deployed and verified package: "+pkg.id+", version: "+pkg.version);
+    }
+    return success;
+  }
+
+  private String resolve(String str, Map<String, String> defaults, Map<String, String> overrides, Map<String, String> systemParams) {
+    for (String param: defaults.keySet()) {
+      str = str.replaceAll("\\$\\{"+param+"\\}", overrides.containsKey(param)? overrides.get(param): defaults.get(param));
+    }
+    for (String param: overrides.keySet()) {
+      str = str.replaceAll("\\$\\{"+param+"\\}", overrides.get(param));
+    }
+    for (String param: systemParams.keySet()) {
+      str = str.replaceAll("\\$\\{"+param+"\\}", systemParams.get(param));
+    }
+    return str;
+  }
+  //nocommit should this be private?
+  public boolean verify(SolrPackageInstance pkg, List<String> collections) {
+    // verify deployment succeeded?
+    boolean success = true;
+    for (Plugin p: pkg.getPlugins()) {
+      System.out.println(p.verifyCommand);
+      for (String collection: collections) {
+        System.out.println("Executing " + p.verifyCommand + " for collection:" + collection);
+        Map<String, String> collectionParameterOverrides;
+        try {
+          collectionParameterOverrides = (Map<String, String>)((Map)((Map)((Map)new ObjectMapper().readValue
+              (get(solrBaseUrl + "/api/collections/abc/config/params/packages"), Map.class).get("response")).get("params")).get("packages")).get(pkg.id);
+        } catch (IOException e) {
+          throw new RuntimeException(e);
+        }
+        
+        Command cmd = p.verifyCommand;
+
+        Map<String, String> systemParams = new HashMap<String,String>();
+        systemParams.put("collection", collection);
+        systemParams.put("package-name", pkg.id);
+        systemParams.put("package-version", pkg.version);
+        String url = solrBaseUrl + resolve(cmd.path, pkg.parameterDefaults, collectionParameterOverrides, systemParams);
+
+        if ("GET".equalsIgnoreCase(cmd.method)) {
+          String response = get(url);
+          System.out.println(response);
+          String actualValue = JsonPath.parse(response).read(resolve(cmd.condition, pkg.parameterDefaults, collectionParameterOverrides, systemParams));
+          String expectedValue = resolve(cmd.expected, pkg.parameterDefaults, collectionParameterOverrides, systemParams);
+          System.out.println("Actual: "+actualValue+", expected: "+expectedValue);
+          if (!expectedValue.equals(actualValue)) {
+            System.out.println("Failed to deploy plugin: "+p.id);
+            success = false;
+          }
+        } // nocommit: support POST verify commands?
+      }
+    }
+    return success;
+  }
+
+  public boolean deployUpdatePackage(String pluginId, String version, List<String> collections) {
+    SolrPackageInstance pkg = getPackage(pluginId, version);
+    for (Plugin p: pkg.getPlugins()) {
+
+      System.out.println(p.updateCommand);
+      for (String collection: collections) {
+        System.out.println("Executing " + p.updateCommand + " for collection:" + collection);
+        postJson(solrBaseUrl + "/solr/"+collection+"/config", p.updateCommand);
+      }
+    }
+    boolean success = verify(pkg, collections);
+    if (success) {
+      System.out.println("Deployed and verified package: "+pkg.id+", version: "+pkg.version);
+    }
+    return true;
+  }
+
+  String getStringFromStream(String url) {
+    return get(url);
+  }
+
+  private String get(String url) {
+    try (CloseableHttpClient client = SolrUpdateManager.createTrustAllHttpClientBuilder()) {
+      HttpGet httpGet = new HttpGet(url);
+      httpGet.setHeader("Content-type", "application/json");
+
+      CloseableHttpResponse response = client.execute(httpGet);
+
+      try {
+        HttpEntity rspEntity = response.getEntity();
+        if (rspEntity != null) {
+          InputStream is = rspEntity.getContent();
+          StringWriter writer = new StringWriter();
+          IOUtils.copy(is, writer, "UTF-8");
+          String results = writer.toString();
+
+          return(results);
+        }
+      } catch (IOException e) {
+        e.printStackTrace();
+      }
+    } catch (Exception e1) {
+      throw new RuntimeException(e1);
+    }
+    return null;
+  }
+
+  private void postJson(String url, String postBody) {
+    System.out.println("Posting to "+url+": "+postBody);
+    try (CloseableHttpClient client = SolrUpdateManager.createTrustAllHttpClientBuilder();) {
+      HttpPost httpPost = new HttpPost(url);
+      StringEntity entity = new StringEntity(postBody);
+      httpPost.setEntity(entity);
+      httpPost.setHeader("Accept", "application/json");
+      httpPost.setHeader("Content-type", "application/json");
+
+      CloseableHttpResponse response = client.execute(httpPost);
+
+      try {
+        HttpEntity rspEntity = response.getEntity();
+        if (rspEntity != null) {
+          InputStream is = rspEntity.getContent();
+          StringWriter writer = new StringWriter();
+          IOUtils.copy(is, writer, "UTF-8");
+          String results = writer.toString();
+          System.out.println(results);
+        }
+      } catch (IOException e) {
+        e.printStackTrace();
+        throw new RuntimeException(e);
+      }
+    } catch (Exception e1) {
+      throw new RuntimeException(e1);
+    }
+  }
+
+  // nocommit: javadocs should mention that version==null or "latest" will return latest version installed
+  public SolrPackageInstance getPackage(String pluginId, String version) {
+    getPackages();
+    List<SolrPackageInstance> versions = packages.get(pluginId);
+    String latestVersion = "0.0.0";
+    SolrPackageInstance latest = null;
+    if (versions != null) {
+      for (SolrPackageInstance pkg: versions) {
+        if (pkg.version.equals(version)) {
+          return pkg;
+        }
+        if (versionManager.compareVersions(latestVersion, pkg.version) <= 0) {
+          latestVersion = pkg.version;
+          latest = pkg;
+        }
+      }
+    }
+    if (version == null || version.equalsIgnoreCase("latest")) {
+      return latest;
+    } else return null;
+  }
+
+  @Override
+  public void close() throws IOException {
+    if (zkClient != null) {
+      zkClient.close();
+    }
+  }
+}
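Two notes on the class above, offered as sketches rather than confirmed behavior.
First, getPackages() expects the /packages.json znode to hold, per package, a list
of version entries with "version" and "manifest" keys, roughly:

    {"packages": {"mypkg": [{"version": "1.0.0", "manifest": "/package/mypkg/1.0.0/solr-manifest.json"}]}}

Second, the ${...} substitution in resolve() applies package defaults (unless
overridden), then the collection overrides, then system-supplied values such as
${collection}. A self-contained sketch of that ordering, with all names invented:

    import java.util.Map;

    public class ResolveDemo {
      // mirrors the substitution order of SolrPackageManager.resolve()
      static String resolve(String str, Map<String, String> defaults,
          Map<String, String> overrides, Map<String, String> system) {
        for (Map.Entry<String, String> e : defaults.entrySet())
          str = str.replaceAll("\\$\\{" + e.getKey() + "\\}",
              overrides.getOrDefault(e.getKey(), e.getValue()));
        for (Map.Entry<String, String> e : overrides.entrySet())
          str = str.replaceAll("\\$\\{" + e.getKey() + "\\}", e.getValue());
        for (Map.Entry<String, String> e : system.entrySet())
          str = str.replaceAll("\\$\\{" + e.getKey() + "\\}", e.getValue());
        return str;
      }

      public static void main(String[] args) {
        System.out.println(resolve("/solr/${collection}/config?path=${RH-PATH}",
            Map.of("RH-PATH", "/mypath"), Map.of(), Map.of("collection", "abc")));
        // prints: /solr/abc/config?path=/mypath
      }
    }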
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageRepository.java b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageRepository.java
new file mode 100644
index 0000000..eaca740
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/SolrPackageRepository.java
@@ -0,0 +1,107 @@
+package org.apache.solr.packagemanager;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.packagemanager.SolrPackage.SolrPackageRelease;
+import org.apache.solr.packagemanager.pf4j.CompoundVerifier;
+import org.apache.solr.packagemanager.pf4j.FileDownloader;
+import org.apache.solr.packagemanager.pf4j.FileVerifier;
+import org.apache.solr.packagemanager.pf4j.SimpleFileDownloader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class SolrPackageRepository {
+  private static final Logger log = LoggerFactory.getLogger(SolrPackageRepository.class);
+
+  @JsonProperty("id")
+  public String id;
+  @JsonProperty("url")
+  public String url;
+
+  public SolrPackageRepository() {
+  } //nocommit: no-arg constructor needed for Jackson deserialization?
+
+  public SolrPackageRepository(String id, String url) {
+    this.id = id;
+    this.url = url;
+  }
+
+  public void refresh() {
+    packages = null;
+  }
+
+  @JsonIgnore
+  public FileDownloader getFileDownloader() {
+      return new SimpleFileDownloader();
+  }
+
+  @JsonIgnore
+  public FileVerifier getFileVerfier() {
+      return new CompoundVerifier();
+  }
+
+  @JsonIgnore
+  private Map<String, SolrPackage> packages;
+
+  public Map<String, SolrPackage> getPackages() {
+    if (packages == null) {
+      initPackages();
+    }
+
+    return packages;
+  }
+
+  public SolrPackage getPlugin(String id) {
+    return getPackages().get(id);
+  }
+
+  private void initPackages() {
+    Reader pluginsJsonReader;
+    try {
+      URL pluginsUrl = new URL(new URL(url), "manifest.json"); //nocommit hardcoded
+      log.debug("Read plugins of '{}' repository from '{}'", id, pluginsUrl);
+      pluginsJsonReader = new InputStreamReader(pluginsUrl.openStream());
+    } catch (Exception e) {
+      log.error(e.getMessage(), e);
+      packages = Collections.emptyMap();
+      return;
+    }
+
+    ObjectMapper mapper = new ObjectMapper();
+    SolrPackage items[];
+    try {
+      items = mapper.readValue(pluginsJsonReader, SolrPackage[].class);
+    } catch (IOException e1) {
+      throw new RuntimeException(e1);
+    }
+    packages = new HashMap<>(items.length);
+    for (SolrPackage p : items) {
+      for (SolrPackageRelease r : p.versions) {
+        try {
+          r.url = new URL(new URL(url), r.url).toString();
+          if (r.date.getTime() == 0) {
+            log.warn("Illegal release date when parsing {}@{}, setting to epoch", p.id, r.version);
+          }
+        } catch (MalformedURLException e) {
+          log.warn("Skipping release {} of plugin {} due to failure to build valid absolute URL. Url was {}{}", r.version, p.id, url, r.url);
+        }
+      }
+      p.setRepositoryId(id);
+      packages.put(p.id, p);
+      
+      System.out.println("****\n"+p+"\n*******");
+    }
+    log.debug("Found {} plugins in repository '{}'", packages.size(), id);
+  }
+}
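initPackages() expects the repository base URL to serve a manifest.json holding an
array of the SolrPackage structure shown earlier, with each release "url" resolved
relative to that base. A hypothetical layout:

    http://example.com/repo/manifest.json     -> [{"id": "mypkg", "versions": [...]}, ...]
    http://example.com/repo/mypkg-1.0.0.jar   -> referenced above as "url": "mypkg-1.0.0.jar"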
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/SolrUpdateManager.java b/solr/core/src/java/org/apache/solr/packagemanager/SolrUpdateManager.java
new file mode 100644
index 0000000..c4abf5c
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/SolrUpdateManager.java
@@ -0,0 +1,501 @@
+package org.apache.solr.packagemanager;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.StringWriter;
+import java.net.URL;
+import java.nio.ByteBuffer;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.conn.ssl.NoopHostnameVerifier;
+import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
+import org.apache.http.conn.ssl.SSLContextBuilder;
+import org.apache.http.entity.mime.MultipartEntity;
+import org.apache.http.entity.mime.content.FileBody;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.V2Request;
+import org.apache.solr.client.solrj.response.V2Response;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.packagemanager.SolrPackage.SolrPackageRelease;
+import org.apache.solr.packagemanager.pf4j.CompoundVerifier;
+import org.apache.solr.packagemanager.pf4j.DefaultVersionManager;
+import org.apache.solr.packagemanager.pf4j.FileDownloader;
+import org.apache.solr.packagemanager.pf4j.FileVerifier;
+import org.apache.solr.packagemanager.pf4j.PackageManagerException;
+import org.apache.solr.packagemanager.pf4j.SimpleFileDownloader;
+import org.apache.solr.pkg.PackageAPI;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class SolrUpdateManager {
+
+  final private SolrPackageManager packageManager;
+  final private String repositoriesJsonStr;
+  protected List<SolrPackageRepository> repositories;
+  
+  private DefaultVersionManager versionManager;
+  private String systemVersion;
+  private Map<String, SolrPackageRelease> lastPluginRelease = new HashMap<>();
+
+  final String solrBaseUrl;
+
+  private static final Logger log = LoggerFactory.getLogger(SolrUpdateManager.class);
+
+  public SolrUpdateManager(SolrPackageManager pluginManager, String repositoriesJsonStr, String solrBaseUrl) {
+    this.packageManager = pluginManager;
+    this.repositoriesJsonStr = repositoriesJsonStr;
+    versionManager = new DefaultVersionManager();
+    systemVersion = "0.0.0";
+    this.solrBaseUrl = solrBaseUrl;
+  }
+
+  protected synchronized void initRepositoriesFromJson() {
+    SolrPackageRepository items[];
+    try {
+      items = new ObjectMapper().readValue(this.repositoriesJsonStr, SolrPackageRepository[].class);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+    this.repositories = Arrays.asList(items);
+  }
+
+  public synchronized void refresh() {
+    initRepositoriesFromJson();
+    for (SolrPackageRepository updateRepository : repositories) {
+      updateRepository.refresh();
+    }
+  }
+
+
+  public synchronized boolean installPackage(String id, String version) throws PackageManagerException {
+    return updateOrInstallPackage(Operation.INSTALL, id, version);
+  }
+
+  public synchronized boolean updatePackage(String id, String version) throws PackageManagerException {
+    return updateOrInstallPackage(Operation.UPDATE, id, version);
+  }
+  
+  // nocommit do we need this, when we have a map version of this?
+  public List<SolrPackage> getPackages() {
+    List<SolrPackage> list = new ArrayList<>(getPackagesMap().values());
+    Collections.sort(list);
+
+    return list;
+  }
+  
+  public Map<String, SolrPackage> getPackagesMap() {
+    Map<String, SolrPackage> packagesMap = new HashMap<>();
+    for (SolrPackageRepository repository : getRepositories()) {
+      packagesMap.putAll(repository.getPackages());
+    }
+
+    return packagesMap;
+  }
+  
+  public List<SolrPackageRepository> getRepositories() {
+    refresh();
+    return repositories;
+  }
+
+  private boolean updateOrInstallPackage(Operation op, String id, String version) throws PackageManagerException {
+    /*Path downloaded = downloadPackage(id, version);
+
+    SolrPackageInstance existingPlugin = packageManager.getPackage(id);
+    if (existingPlugin != null && version.equals(existingPlugin.getVersion())) {
+      throw new PackageManagerException("Plugin already installed.");
+    }
+
+    SolrPackageRelease release = null;
+    String repository = null;
+    for (SolrPackage pkg: getPackages()) {
+      if (id.equals(pkg.id)) {
+        for (SolrPackageRelease r: pkg.versions) {
+          if (version.equals(r.version) ) {
+            release = r;
+            repository = pkg.getRepositoryId();
+            break;
+          }
+        }
+      }
+    }
+
+    if (release == null) {
+      throw new PackageManagerException("Couldn't find the release..");
+    }
+
+    String sha256 = uploadToBlobHandler(downloaded);
+    String metadataSha256;
+    try {
+      metadataSha256 = uploadToBlobHandler(new ObjectMapper().writeValueAsString(release.metadata));
+    } catch (IOException e) {
+      throw new PackageManagerException(e);
+    }
+
+    addOrUpdatePackage(op, id, version, sha256, repository, release.sig, metadataSha256, release.metadata);
+    
+    return true;*/
+    
+    //postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs.jar.bin"),
+    //    "/package/mypkg/v1.0/runtimelibs.jar",
+    //    "j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJuXbHkVLj638yZ0Lp1ssnoYA=="
+    //);
+
+    SolrPackageInstance existingPlugin = packageManager.getPackage(id, version);
+    if (existingPlugin != null && version.equals(existingPlugin.getVersion())) {
+      throw new PackageManagerException("Plugin already installed.");
+    }
+
+    SolrPackage pkg = getPackagesMap().get(id);
+    SolrPackageRelease release = findReleaseForPlugin(id, version);
+    Path downloaded = downloadPackage(id, version);
+    System.out.println("Yahaan file hai: "+downloaded);
+    System.out.println("Signature: "+release.sig);
+    System.out.println("Filename: "+downloaded.getFileName().toString());
+    
+    try (HttpSolrClient solrClient = new HttpSolrClient.Builder(solrBaseUrl).build()) {
+      // post the metadata
+      System.out.println("Posting metadata");
+      postFile(solrClient, ByteBuffer.wrap(new ObjectMapper().writeValueAsString(release.metadata).getBytes()),
+          "/package/"+id+"/"+version+"/solr-manifest.json",
+          null);
+
+      // post the artifacts
+      System.out.println("Posting artifacts");
+      postFile(solrClient, getFileContent(downloaded.toFile()),
+          "/package/"+id+"/"+version+"/"+downloaded.getFileName().toString(),
+          release.sig
+      );
+      
+      addOrUpdatePackage(op, solrClient, id, version, new String[] {"/package/"+id+"/"+version+"/"+downloaded.getFileName().toString()}, 
+          pkg.getRepositoryId(), release.sig, "/package/"+id+"/"+version+"/solr-manifest.json", null);
+    } catch (SolrServerException | IOException e) {
+      throw new PackageManagerException(e);
+    }
+    return false;
+  }
+  
+  public static ByteBuffer getFileContent(File file) throws IOException {
+    ByteBuffer jar;
+    try (FileInputStream fis = new FileInputStream(file)) {
+      byte[] buf = new byte[fis.available()];
+      fis.read(buf);
+      jar = ByteBuffer.wrap(buf);
+    }
+    return jar;
+  }
+
+  public static void postFile(SolrClient client, ByteBuffer buffer, String name, String sig)
+      throws SolrServerException, IOException {
+    String resource = "/api/cluster/files" + name;
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    if (sig != null) {
+      params.add("sig", sig);
+    }
+    V2Response rsp = new V2Request.Builder(resource)
+        .withMethod(SolrRequest.METHOD.PUT)
+        .withPayload(buffer)
+        .forceV2(true)
+        .withMimeType("application/octet-stream")
+        .withParams(params)
+        .build()
+        .process(client);
+    if (!name.equals(rsp.getResponse().get(CommonParams.FILE))) {
+      throw new PackageManagerException("Mismatch in file uploaded. Uploaded: " +
+          rsp.getResponse().get(CommonParams.FILE)+", Original: "+name);
+    }
+  }
+
+  public static enum Operation {
+    INSTALL, UPDATE;
+  }
+  
+  private boolean addOrUpdatePackage(Operation op, SolrClient solrClient, String id, String version, String files[], String repository, String sig,
+      String manifest, String manifestSHA512) {
+    
+    /*String json;
+    
+    if (op.equals(Operation.INSTALL)) {
+      json = "{add: ";
+    } else {
+      json = "{update: ";
+    }
+    json = json
+        + "{name: '"+id+"', "
+        + "version: '"+version+"', "
+        + "repository: '"+repository+"', "
+        + "blob: {sha256: '"+sha256+"', sig: '"+sig+"'}, "
+        + "metadata: '"+metadataSha256+"'"
+        + "}}";
+
+    System.out.println("Posting package: "+json);
+    try (CloseableHttpClient client = createTrustAllHttpClientBuilder()) {
+      HttpPost httpPost = new HttpPost(solrBaseUrl + "/api/cluster/package");
+      StringEntity entity = new StringEntity(json);
+      httpPost.setEntity(entity);
+      httpPost.setHeader("Accept", "application/json");
+      httpPost.setHeader("Content-type", "application/json");
+
+      try (CloseableHttpResponse response = client.execute(httpPost)) {
+        HttpEntity rspEntity = response.getEntity();
+        if (rspEntity != null) {
+          InputStream is = rspEntity.getContent();
+          StringWriter writer = new StringWriter();
+          IOUtils.copy(is, writer, "UTF-8");
+          String results = writer.toString();
+          System.out.println(results);
+        }
+      } catch (IOException e) {
+        e.printStackTrace();
+      }
+    } catch (Exception e) {
+      e.printStackTrace();
+    }*/
+    
+    PackageAPI.AddVersion add = new PackageAPI.AddVersion();
+    add.version = version;
+    add.pkg = id;
+    add.files = Arrays.asList(files);
+    add.manifest = manifest;
+    add.manifestSHA512 = "MY_MANIFEST_SHA512";
+
+    V2Request req = new V2Request.Builder("/api/cluster/package")
+        .forceV2(true)
+        .withMethod(SolrRequest.METHOD.POST)
+        .withPayload(Collections.singletonMap("add", add))
+        .build();
+
+    try {
+      V2Response resp = req.process(solrClient);
+      System.out.println("Response: "+resp.jsonStr());
+    } catch (SolrServerException | IOException e) {
+      throw new PackageManagerException(e);
+    }
+
+    return true;
+  }
+
+  public static CloseableHttpClient createTrustAllHttpClientBuilder() throws Exception {
+    SSLContextBuilder builder = new SSLContextBuilder();
+    builder.loadTrustMaterial(null, (chain, authType) -> true);           
+    SSLConnectionSocketFactory sslsf = new 
+    SSLConnectionSocketFactory(builder.build(), NoopHostnameVerifier.INSTANCE);
+    return HttpClients.custom().setSSLSocketFactory(sslsf).build();
+  }
+  
+  private String uploadToBlobHandler(Path downloaded) throws PackageManagerException {
+    String url = solrBaseUrl + "/api/cluster/blob";
+    File file = downloaded.toFile();
+    try (CloseableHttpClient client = createTrustAllHttpClientBuilder()) { //HttpClients.createDefault();) {
+      HttpPost post = new HttpPost(url);
+
+      MultipartEntity entity = new MultipartEntity();
+      entity.addPart("file", new FileBody(file));
+      post.setEntity(entity);
+
+      try {
+        HttpResponse response = client.execute(post);
+
+        HttpEntity rspEntity = response.getEntity();
+        if (rspEntity != null) {
+          InputStream is = rspEntity.getContent();
+          StringWriter writer = new StringWriter();
+          IOUtils.copy(is, writer, "UTF-8");
+          String results = writer.toString();
+          System.out.println(results);
+          String sha = new ObjectMapper().readValue(results, Map.class).get("sha256").toString();
+          //System.out.println("SHA: "+sha);
+          return sha;
+        }
+      } catch (IOException e) {
+        // rethrow so the caller sees the upload failure
+        throw e;
+      }
+    } catch (Exception e1) {
+      // surface any other failure as a PackageManagerException
+      e1.printStackTrace();
+      throw new PackageManagerException(e1);
+    }
+    return null;
+  }
+  
+  private String uploadToBlobHandler(String json) throws IOException, PackageManagerException {
+    System.out.println("Trying to upload the blob: "+json);
+    FileUtils.writeStringToFile(new File("tmp-metadata"), json);
+    return uploadToBlobHandler(new File("tmp-metadata").toPath());
+  }
+
+  /**
+   * Downloads a plugin with the given coordinates, runs all {@link FileVerifier}s
+   * and returns a path to the downloaded file.
+   *
+   * @param id of plugin
+   * @param version of plugin or null to download latest
+   * @return Path to file which will reside in a temporary folder in the system default temp area
+   * @throws PackageManagerException if download failed
+   */
+  protected Path downloadPackage(String id, String version) throws PackageManagerException {
+      try {
+          SolrPackageRelease release = findReleaseForPlugin(id, version);
+          Path downloaded = getFileDownloader(id).downloadFile(new URL(release.url));
+          //getFileVerifier(id).verify(new FileVerifier.Context(id, release), downloaded);
+          //nocommit verify this download
+          return downloaded;
+      } catch (IOException e) {
+          throw new PackageManagerException(e, "Error during download of plugin {}", id);
+      }
+  }
+
+  /**
+   * Finds the {@link FileDownloader} to use for this repository.
+   *
+   * @param pluginId the plugin we wish to download
+   * @return FileDownloader instance
+   */
+  protected FileDownloader getFileDownloader(String pluginId) {
+      for (SolrPackageRepository ur : repositories) {
+          if (ur.getPlugin(pluginId) != null && ur.getFileDownloader() != null) {
+              return ur.getFileDownloader();
+          }
+      }
+
+      return new SimpleFileDownloader();
+  }
+
+  /**
+   * Gets a file verifier to use for this plugin. First tries to use the custom verifier
+   * configured for the repository, then falls back to the default CompoundVerifier.
+   *
+   * @param pluginId the plugin we wish to download
+   * @return FileVerifier instance
+   */
+  protected FileVerifier getFileVerifier(String pluginId) {
+      for (SolrPackageRepository ur : repositories) {
+          if (ur.getPlugin(pluginId) != null && ur.getFileVerfier() != null) {
+              return ur.getFileVerfier();
+          }
+      }
+
+      return new CompoundVerifier();
+  }
+  
+  /**
+   * Resolves Release from id and version.
+   *
+   * @param id of plugin
+   * @param version of plugin or null to locate latest version
+   * @return PluginRelease for downloading
+   * @throws PackageManagerException if id or version does not exist
+   */
+  public SolrPackageRelease findReleaseForPlugin(String id, String version) throws PackageManagerException {
+      SolrPackage pluginInfo = getPackagesMap().get(id);
+      if (pluginInfo == null) {
+          log.info("Plugin with id {} does not exist in any repository", id);
+          throw new PackageManagerException("Plugin with id {} not found in any repository", id);
+      }
+
+      if (version == null) {
+          return getLastPackageRelease(id);
+      }
+
+      for (SolrPackageRelease release : pluginInfo.versions) {
+          if (versionManager.compareVersions(version, release.version) == 0 && release.url != null) {
+              return release;
+          }
+      }
+
+      throw new PackageManagerException("Plugin {} with version @{} does not exist in the repository", id, version);
+  }
+  
+  /**
+   * Returns the last release version of this plugin for the given system version, regardless of release date.
+   *
+   * @return PluginRelease which has the highest version number
+   */
+  public SolrPackageRelease getLastPackageRelease(String id) {
+      SolrPackage pluginInfo = getPackagesMap().get(id);
+      if (pluginInfo == null) {
+          return null;
+      }
+
+      if (!lastPluginRelease.containsKey(id)) {
+          for (SolrPackageRelease release : pluginInfo.versions) {
+              if (systemVersion.equals("0.0.0") || versionManager.checkVersionConstraint(systemVersion, release.requires)) {
+                  if (lastPluginRelease.get(id) == null) {
+                      lastPluginRelease.put(id, release);
+                  } else if (versionManager.compareVersions(release.version, lastPluginRelease.get(id).version) > 0) {
+                      lastPluginRelease.put(id, release);
+                  }
+              }
+          }
+      }
+
+      return lastPluginRelease.get(id);
+  }
+  
+  /**
+   * Checks whether a newer version of the plugin is available.
+   *
+   * @return true if there is a newer version available which is compatible with the system
+   */
+  public boolean hasPluginUpdate(String id) {
+      SolrPackage pluginInfo = getPackagesMap().get(id);
+      if (pluginInfo == null) {
+          return false;
+      }
+
+      String installedVersion = packageManager.getPackage(id, null).getVersion();
+      SolrPackageRelease last = getLastPackageRelease(id);
+
+      return last != null && versionManager.compareVersions(last.version, installedVersion) > 0;
+  }
+
+  
+  /**
+   * Returns a list of plugins that are newer versions of already installed plugins.
+   *
+   * @return list of plugins that have updates
+   */
+  public List<SolrPackage> getUpdates() {
+      List<SolrPackage> updates = new ArrayList<>();
+      for (SolrPackageInstance installed : packageManager.getPackages()) {
+          String pluginId = installed.getPluginId();
+          if (hasPluginUpdate(pluginId)) {
+              updates.add(getPackagesMap().get(pluginId));
+          }
+      }
+
+      return updates;
+  }
+
+  /**
+   * Checks if the update repositories have newer versions of any of the installed plugins.
+   *
+   * @return true if updates exist
+   */
+  public boolean hasUpdates() {
+      return getUpdates().size() > 0;
+  }
+
+
+}
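A rough usage sketch of the update flow above; packageManager and repositoriesJson
are assumed to be in scope, and the repositories JSON mirrors the
SolrPackageRepository fields (id, url):

    // assumed: repositoriesJson = "[{\"id\": \"repo1\", \"url\": \"http://example.com/repo/\"}]"
    SolrUpdateManager updateManager =
        new SolrUpdateManager(packageManager, repositoriesJson, "http://localhost:8983/solr");
    if (updateManager.hasUpdates()) {
      for (SolrPackage pkg : updateManager.getUpdates()) {
        // downloads the artifact, posts jar + manifest to /api/cluster/files,
        // then registers the new version through /api/cluster/package
        updateManager.updatePackage(pkg.id, updateManager.getLastPackageRelease(pkg.id).version);
      }
    }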
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/pf4j/BasicVerifier.java b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/BasicVerifier.java
new file mode 100644
index 0000000..f912b06
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/BasicVerifier.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2012-present the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.packagemanager.pf4j;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+/**
+ * Verifies that the file exists, is a regular file, and has a non-zero size.
+ */
+public class BasicVerifier implements FileVerifier {
+    /**
+     * Verifies a plugin release according to certain rules
+     *
+     * @param context the file verifier context object
+     * @param file    the path to the downloaded file itself
+     * @throws IOException     if there was a problem accessing file
+     * @throws VerifyException in case of problems verifying the file
+     */
+    @Override
+    public void verify(Context context, Path file) throws VerifyException, IOException {
+        if (!Files.isRegularFile(file) || Files.size(file) == 0) {
+            throw new VerifyException("File {} is not a regular file or has size 0", file);
+        }
+    }
+}
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/pf4j/CompoundVerifier.java b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/CompoundVerifier.java
new file mode 100644
index 0000000..481ade7
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/CompoundVerifier.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2012-present the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.packagemanager.pf4j;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+public class CompoundVerifier implements FileVerifier {
+    /**
+     * Default list of verifiers
+     */
+    public static final List<FileVerifier> ALL_DEFAULT_FILE_VERIFIERS = Arrays.asList(
+                new BasicVerifier(),
+                new Sha512SumVerifier());
+
+    private List<FileVerifier> verifiers = new ArrayList<>();
+
+    /**
+     * Default constructor which will add the default verifiers to start with
+     */
+    public CompoundVerifier() {
+        setVerifiers(ALL_DEFAULT_FILE_VERIFIERS);
+    }
+
+    /**
+     * Constructs a Compound verifier using the supplied list of verifiers instead of the default ones
+     * @param verifiers the list of verifiers to apply
+     */
+    public CompoundVerifier(List<FileVerifier> verifiers) {
+        this.verifiers = verifiers;
+    }
+
+    @Override
+    public void verify(Context context, Path file) throws IOException, VerifyException {
+        for (FileVerifier verifier : getVerifiers()) {
+            verifier.verify(context, file);
+        }
+    }
+
+    public List<FileVerifier> getVerifiers() {
+        return verifiers;
+    }
+
+    public void setVerifiers(List<FileVerifier> verifiers) {
+        this.verifiers = verifiers;
+    }
+}
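Usage sketch: the no-arg constructor wires in BasicVerifier and Sha512SumVerifier,
but a custom chain can be supplied; id, release and downloaded are placeholders here:

    FileVerifier verifier = new CompoundVerifier(
        Arrays.asList(new BasicVerifier(), new Sha512SumVerifier()));
    // throws VerifyException if any verifier in the chain rejects the file
    verifier.verify(new FileVerifier.Context(id, release), downloaded);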
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/pf4j/DefaultVersionManager.java b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/DefaultVersionManager.java
new file mode 100644
index 0000000..8ccce47
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/DefaultVersionManager.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2017 Decebal Suiu
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.packagemanager.pf4j;
+
+import com.github.zafarkhaja.semver.Version;
+import com.github.zafarkhaja.semver.expr.Expression;
+
+/**
+ * This implementation uses jSemVer (a Java implementation of the SemVer Specification).
+ *
+ * @author Decebal Suiu
+ */
+public class DefaultVersionManager {
+
+    /**
+     * Checks if a version satisfies the specified SemVer {@link Expression} string.
+     * If the constraint is empty or null then the method returns true.
+     * Constraint examples: {@code >2.0.0} (simple), {@code ">=1.4.0 & <1.6.0"} (range).
+     * See https://github.com/zafarkhaja/jsemver#semver-expressions-api-ranges for more info.
+     *
+     */
+    public boolean checkVersionConstraint(String version, String constraint) {
+        return StringUtils.isNullOrEmpty(constraint) || Version.valueOf(version).satisfies(constraint);
+    }
+
+    public int compareVersions(String v1, String v2) {
+        return Version.valueOf(v1).compareTo(Version.valueOf(v2));
+    }
+
+}
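A few examples of what these two methods do, following the jSemVer semantics
described in the javadoc:

    DefaultVersionManager vm = new DefaultVersionManager();
    vm.compareVersions("2.1.0", "1.1.0");                   // > 0: 2.1.0 is newer
    vm.checkVersionConstraint("1.5.0", ">=1.4.0 & <1.6.0"); // true: inside the range
    vm.checkVersionConstraint("1.5.0", null);               // true: empty constraint always passes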
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/pf4j/FileDownloader.java b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/FileDownloader.java
new file mode 100644
index 0000000..32f6fe4
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/FileDownloader.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2012-present the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.packagemanager.pf4j;
+
+
+import java.io.IOException;
+import java.net.URL;
+import java.nio.file.Path;
+
+/**
+ * Interface to download a file.
+ */
+public interface FileDownloader {
+
+    /**
+     * Downloads a file to destination. The implementation should download to a temporary folder.
+     * Implementations may choose to support different protocols such as http, https, ftp, file...
+     * The path returned must be temporary in nature and will most probably be moved/deleted by the consumer.
+     *
+     * @param fileUrl the URL representing the file to download
+     * @return Path of downloaded file, typically in a temporary folder
+     * @throws IOException if there was an IO problem during download
+     * @throws PackageManagerException in case of other problems, such as unsupported protocol
+     */
+    Path downloadFile(URL fileUrl) throws PackageManagerException, IOException;
+}
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/pf4j/FileVerifier.java b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/FileVerifier.java
new file mode 100644
index 0000000..5f755aa
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/FileVerifier.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2012-present the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.packagemanager.pf4j;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.solr.packagemanager.SolrPackage;
+
+/**
+ * Interface to verify a file.
+ */
+public interface FileVerifier {
+
+    void verify(Context context, Path file) throws IOException, VerifyException;
+
+    /**
+     * Context to be passed to file verifiers
+     */
+    class Context {
+        public String id;
+        public Date date;
+        public String version;
+        public String requires;
+        public String url;
+        public String sha512sum;
+        public Map<String,Object> meta = new HashMap<>();
+
+        public Context(String id, SolrPackage.SolrPackageRelease pluginRelease) {
+            this.id = id;
+            this.date = pluginRelease.date;
+            this.version = pluginRelease.version;
+            this.requires = pluginRelease.requires;
+            this.url = pluginRelease.url;
+            this.sha512sum = pluginRelease.sha512sum;
+        }
+
+        public Context(String id, Date date, String version, String requires, String url, String sha512sum) {
+            this.id = id;
+            this.date = date;
+            this.version = version;
+            this.requires = requires;
+            this.url = url;
+            this.sha512sum = sha512sum;
+        }
+    }
+}
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/pf4j/PackageManagerException.java b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/PackageManagerException.java
new file mode 100644
index 0000000..76fb761
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/PackageManagerException.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2012 Decebal Suiu
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.packagemanager.pf4j;
+
+
+/**
+ * An exception used to indicate that a plugin problem occurred.
+ *
+ * @author Decebal Suiu
+ */
+public class PackageManagerException extends RuntimeException {
+
+    public PackageManagerException() {
+        super();
+    }
+
+    public PackageManagerException(String message) {
+        super(message);
+    }
+
+    public PackageManagerException(Throwable cause) {
+        super(cause);
+    }
+
+    public PackageManagerException(String message, Throwable cause) {
+        super(message, cause);
+    }
+
+    public PackageManagerException(Throwable cause, String message, Object... args) {
+        super(StringUtils.format(message, args), cause);
+    }
+
+    public PackageManagerException(String message, Object... args) {
+        super(StringUtils.format(message, args));
+    }
+
+}
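
Note the pf4j-style "{}" placeholders in the last two constructors; they are expanded
by StringUtils.format (added later in this commit), so callers can write, for instance:

    throw new PackageManagerException("URL protocol {} not supported", fileUrl.getProtocol());
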
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/pf4j/Sha512SumVerifier.java b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/Sha512SumVerifier.java
new file mode 100644
index 0000000..26f2535
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/Sha512SumVerifier.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2012-present the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.packagemanager.pf4j;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.lang.invoke.MethodHandles;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+/**
+ * Verifies that the SHA512 checksum of a downloaded file equals the checksum given in
+ * the plugins.json descriptor. This helps validate that the file downloaded is exactly
+ * the same as intended. Especially useful when dealing with meta repositories pointing
+ * to S3 or other 3rd party download locations that could have been tampered with.
+ */
+public class Sha512SumVerifier implements FileVerifier {
+    private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+    /**
+     * Verifies that the SHA-512 checksum of the downloaded file matches the
+     * checksum declared for the release.
+     *
+     * @param context the file verifier context object
+     * @param file    the path to the downloaded file itself
+     * @throws IOException     if there was a problem accessing the file
+     * @throws VerifyException in case of problems verifying the file
+     */
+    @Override
+    public void verify(Context context, Path file) throws VerifyException, IOException {
+        String expectedSha512sum;
+        try {
+            if (context.sha512sum == null) {
+                log.debug("No sha512 checksum specified, skipping verification");
+                return;
+            } else if (context.sha512sum.equalsIgnoreCase(".sha512")) {
+                String url = context.url.substring(0, context.url.lastIndexOf(".")) + ".sha512";
+                expectedSha512sum = getUrlContents(url).split(" ")[0].trim();
+            } else if (context.sha512sum.startsWith("http")) {
+                expectedSha512sum = getUrlContents(context.sha512sum).split(" ")[0].trim();
+            } else {
+                expectedSha512sum = context.sha512sum;
+            }
+        } catch (IOException e) {
+            throw new VerifyException(e, "SHA512 checksum verification failed, could not download SHA512 file ({})", context.sha512sum);
+        }
+
+        log.debug("Verifying sha512 checksum of file {}", file.getFileName());
+        String actualSha512sum;
+        try (InputStream is = Files.newInputStream(file)) {
+            actualSha512sum = DigestUtils.sha512Hex(is);
+        }
+        if (actualSha512sum.equalsIgnoreCase(expectedSha512sum)) {
+            log.debug("Checksum OK");
+            return;
+        }
+        throw new VerifyException("SHA512 checksum of downloaded file " + file.getFileName()
+                + " does not match that from plugin descriptor. Got " + actualSha512sum
+                + " but expected " + expectedSha512sum);
+    }
+
+    private String getUrlContents(String url) throws IOException {
+        try (BufferedReader reader = new BufferedReader(new InputStreamReader(
+                new URL(url).openStream()))) {
+            return reader.readLine();
+        }
+    }
+}
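
For repository authors, the expected checksum that goes into the release descriptor can
be produced with the same DigestUtils call the verifier uses; a small sketch (the jar
file name is hypothetical):

    try (InputStream in = Files.newInputStream(Paths.get("mypkg-1.0.0.jar"))) {
        // hex-encoded SHA-512, suitable for the sha512sum field of the release
        System.out.println(DigestUtils.sha512Hex(in));
    }
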
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/pf4j/SimpleFileDownloader.java b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/SimpleFileDownloader.java
new file mode 100644
index 0000000..5b7c3ae
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/SimpleFileDownloader.java
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2012-present the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.packagemanager.pf4j;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.invoke.MethodHandles;
+import java.net.ConnectException;
+import java.net.HttpURLConnection;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.net.URLConnection;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+import java.nio.file.attribute.FileTime;
+
+/**
+ * Downloads a file from a URL.
+ *
+ * @author Decebal Suiu
+ */
+public class SimpleFileDownloader implements FileDownloader {
+
+    private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+    /**
+     * Downloads a file. For HTTP(S) or FTP the content is streamed; for a local file: URL
+     * a simple filesystem copy into a temporary folder is performed. Other protocols are not supported.
+     *
+     * @param fileUrl the URL representing the file to download
+     * @return the path of downloaded/copied file
+     * @throws IOException in case of network or IO problems
+     * @throws PackageManagerException in case of other problems
+     */
+    public Path downloadFile(URL fileUrl) throws PackageManagerException, IOException {
+        switch (fileUrl.getProtocol()) {
+            case "http":
+            case "https":
+            case "ftp":
+                return downloadFileHttp(fileUrl);
+            case "file":
+                return copyLocalFile(fileUrl);
+            default:
+                throw new PackageManagerException("URL protocol {} not supported", fileUrl.getProtocol());
+        }
+    }
+
+    /**
+     * Efficient copy of file in case of local file system.
+     *
+     * @param fileUrl source file
+     * @return path of target file
+     * @throws IOException if problems during copy
+     * @throws PackageManagerException in case of other problems
+     */
+    protected Path copyLocalFile(URL fileUrl) throws IOException, PackageManagerException {
+        Path destination = Files.createTempDirectory("pf4j-update-downloader");
+        destination.toFile().deleteOnExit();
+
+        try {
+            Path fromFile = Paths.get(fileUrl.toURI());
+            String path = fileUrl.getPath();
+            String fileName = path.substring(path.lastIndexOf('/') + 1);
+            Path toFile = destination.resolve(fileName);
+            Files.copy(fromFile, toFile, StandardCopyOption.COPY_ATTRIBUTES, StandardCopyOption.REPLACE_EXISTING);
+
+            return toFile;
+        } catch (URISyntaxException e) {
+            throw new PackageManagerException("Something is wrong with the given URL", e);
+        }
+    }
+
+    /**
+     * Downloads file from HTTP or FTP.
+     *
+     * @param fileUrl source file
+     * @return path of downloaded file
+     * @throws IOException if IO problems
+     * @throws PackageManagerException if validation fails or any other problems
+     */
+    protected Path downloadFileHttp(URL fileUrl) throws IOException, PackageManagerException {
+        Path destination = Files.createTempDirectory("pf4j-update-downloader");
+        destination.toFile().deleteOnExit();
+
+        String path = fileUrl.getPath();
+        String fileName = path.substring(path.lastIndexOf('/') + 1);
+        Path file = destination.resolve(fileName);
+
+        // set up the URL connection
+        URLConnection connection = fileUrl.openConnection();
+
+        // connect to the remote site (may take some time)
+        connection.connect();
+
+        // check for http authorization (an FTP connection is not an HttpURLConnection, so guard the cast)
+        if (connection instanceof HttpURLConnection) {
+            HttpURLConnection httpConnection = (HttpURLConnection) connection;
+            if (httpConnection.getResponseCode() == HttpURLConnection.HTTP_UNAUTHORIZED) {
+                throw new ConnectException("HTTP Authorization failure");
+            }
+        }
+
+        // try to get the server-specified last-modified date of this artifact
+        long lastModified = connection.getHeaderFieldDate("Last-Modified", System.currentTimeMillis());
+
+        // try to get the input stream (three times)
+        InputStream is = null;
+        for (int i = 0; i < 3; i++) {
+            try {
+                is = connection.getInputStream();
+                break;
+            } catch (IOException e) {
+                log.error(e.getMessage(), e);
+            }
+        }
+        if (is == null) {
+            throw new ConnectException("Can't get '" + fileUrl + "' to '" + file + "'");
+        }
+
+        // read from the remote resource and write to the local file, closing both streams on exit
+        try (InputStream in = is; FileOutputStream fos = new FileOutputStream(file.toFile())) {
+            byte[] buffer = new byte[1024];
+            int length;
+            while ((length = in.read(buffer)) >= 0) {
+                fos.write(buffer, 0, length);
+            }
+        }
+
+        log.debug("Set last modified of '{}' to '{}'", file, lastModified);
+        Files.setLastModifiedTime(file, FileTime.fromMillis(lastModified));
+
+        return file;
+    }
+}
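
Note that a file: URL takes the copy path rather than the streaming path, which is
convenient when testing against a repository on local disk; a sketch (paths hypothetical):

    SimpleFileDownloader downloader = new SimpleFileDownloader();
    Path copied = downloader.downloadFile(Paths.get("/tmp/repo/mypkg-1.0.0.jar").toUri().toURL());
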
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/pf4j/StringUtils.java b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/StringUtils.java
new file mode 100644
index 0000000..a1a3ad3
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/StringUtils.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2012 Decebal Suiu
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.packagemanager.pf4j;
+
+/**
+ * @author Decebal Suiu
+ */
+public class StringUtils {
+
+    public static boolean isNullOrEmpty(String str) {
+        return (str == null) || str.isEmpty();
+    }
+
+    public static boolean isNotNullOrEmpty(String str) {
+        return !isNullOrEmpty(str);
+    }
+
+    /**
+     * Format the string. Replace "{}" with %s and format the string using {@link String#format(String, Object...)}.
+     */
+    public static String format(String str, Object... args) {
+        str = str.replaceAll("\\{}", "%s");
+
+        return String.format(str, args);
+    }
+
+    public static String addStart(String str, String add) {
+        if (isNullOrEmpty(add)) {
+            return str;
+        }
+
+        if (isNullOrEmpty(str)) {
+            return add;
+        }
+
+        if (!str.startsWith(add)) {
+            return add + str;
+        }
+
+        return str;
+    }
+
+}
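
For example:

    StringUtils.format("URL protocol {} not supported", "sftp"); // -> "URL protocol sftp not supported"
    StringUtils.addStart("keys/exe", "/");                       // -> "/keys/exe"
    StringUtils.addStart("/keys/exe", "/");                      // -> "/keys/exe" (prefix already present)
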
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/pf4j/VerifyException.java b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/VerifyException.java
new file mode 100644
index 0000000..e893963
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/packagemanager/pf4j/VerifyException.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2012-present the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.packagemanager.pf4j;
+
+/**
+ * Marker exception for plugin verification failure
+ */
+public class VerifyException extends PackageManagerException {
+    public VerifyException(String message) {
+        super(message);
+    }
+
+    public VerifyException(Throwable cause, String message, Object... args) {
+        super(cause, message, args);
+    }
+
+    public VerifyException(String message, Object... args) {
+        super(message, args);
+    }
+}
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
index 4bd822e..fa9f952 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
@@ -169,12 +169,20 @@ public class PackageAPI {
     @JsonProperty
     public List<String> files;
 
+    @JsonProperty
+    public String manifest;
+
+    @JsonProperty
+    public String manifestSHA512;
+
     public PkgVersion() {
     }
 
     public PkgVersion(AddVersion addVersion) {
       this.version = addVersion.version;
       this.files = addVersion.files;
+      this.manifest = addVersion.manifest;
+      this.manifestSHA512 = addVersion.manifestSHA512; // nocommit: compute and populate here
     }
 
 
@@ -344,7 +352,10 @@ public class PackageAPI {
     public String version;
     @JsonProperty(required = true)
     public List<String> files;
-
+    @JsonProperty(required = false)
+    public String manifestSHA512;
+    @JsonProperty(required = false)
+    public String manifest;
   }
 
   public static class DelVersion implements ReflectMapWriter {
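
With these fields in place, an add-version payload to the package API can carry the
manifest details. A sketch only (the package name key, paths and checksum are
hypothetical, and manifestSHA512 is still to be computed server-side, per the
nocommit above):

    {
      "add": {
        "package": "mypkg",
        "version": "1.0.0",
        "files": ["/package/mypkg/1.0.0/mypkg.jar"],
        "manifest": "/package/mypkg/1.0.0/manifest.json",
        "manifestSHA512": "<sha512 of the manifest>"
      }
    }
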
diff --git a/solr/core/src/java/org/apache/solr/util/PackageTool.java b/solr/core/src/java/org/apache/solr/util/PackageTool.java
new file mode 100644
index 0000000..f5491e6
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/util/PackageTool.java
@@ -0,0 +1,302 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.util;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.io.IOUtils;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.core.config.Configurator;
+import org.apache.solr.client.solrj.impl.HttpClientUtil;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.packagemanager.SolrPackage;
+import org.apache.solr.packagemanager.SolrPackage.SolrPackageRelease;
+import org.apache.solr.packagemanager.SolrPackageInstance;
+import org.apache.solr.packagemanager.SolrPackageManager;
+import org.apache.solr.packagemanager.SolrPackageRepository;
+import org.apache.solr.packagemanager.SolrUpdateManager;
+import org.apache.solr.packagemanager.pf4j.PackageManagerException;
+import org.apache.solr.util.SolrCLI.StatusTool;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.KeeperException;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+
+public class PackageTool extends SolrCLI.ToolBase {
+  @Override
+  public String getName() {
+    return "package";
+  }
+
+  public static String solrUrl = null;
+
+  public SolrPackageManager packageManager;
+  public SolrUpdateManager updateManager;
+
+  @Override
+  protected void runImpl(CommandLine cli) throws Exception {
+    // Need logging-free, clean output going through to the user.
+    Configurator.setRootLevel(Level.OFF);
+
+    solrUrl = cli.getOptionValues("solrUrl")[cli.getOptionValues("solrUrl").length-1];
+    String solrBaseUrl = solrUrl.replaceAll("\\/solr$", ""); // strip out ending "/solr"
+    System.out.println("solr url: "+solrUrl+", solr base url: "+solrBaseUrl);
+
+    String zkHost = getZkHost(cli);
+
+    System.out.println("ZK: "+zkHost);
+    String cmd = cli.getArgs()[0];
+
+    try (SolrZkClient zkclient = new SolrZkClient(zkHost, 30000)) {
+      if (cmd != null) {
+        packageManager = new SolrPackageManager(new File("./plugins"), solrBaseUrl, zkHost); 
+        try {
+          updateManager = new SolrUpdateManager(packageManager,
+              getRepositoriesJson(zkclient), solrBaseUrl);
+
+          switch (cmd) {
+            case "add-repo":
+              addRepo(zkHost, cli.getArgs()[1], cli.getArgs()[2]);
+              break;
+            case "list":
+              list(cli.getArgList().subList(1, cli.getArgList().size()));
+              break;
+            case "list-available":
+              try {
+                available(cli.getArgList().subList(1, cli.getArgList().size()));
+              } catch (PackageManagerException ex) {
+                ex.printStackTrace();
+              }
+              break;
+            case "install":
+              install(cli.getArgList().subList(1, cli.getArgList().size()));
+              break;
+            case "deploy":
+              String colls[] = cli.getOptionValues("collections");
+              String params[] = cli.getOptionValues("param");
+              System.out.println("coll: "+Arrays.toString(colls)+", params: "+Arrays.toString(params));
+              deploy(cli.getArgList().get(1).toString(), colls, params);
+              break;
+            case "redeploy":
+              redeploy(cli.getArgList().subList(1, cli.getArgList().size()));
+              break;
+            case "update":
+              if (cli.getArgList().size()==1) {
+                update();
+              } else {
+                updatePackage(zkHost, cli.getArgs()[1], cli.getArgList().subList(2, cli.getArgList().size()));
+              }
+              break;
+            default:
+              throw new RuntimeException("Unrecognized command: "+cmd);
+          }
+        } finally {
+          packageManager.close();
+        }
+      }
+    }
+    System.out.println("Done: "+cmd); // nocommit
+  }
+
+  protected void addRepo(String zkHost, String name, String uri) throws KeeperException, InterruptedException, MalformedURLException, IOException {
+    try (SolrZkClient zkClient = new SolrZkClient(zkHost, 30000)) {
+      String existingRepositoriesJson = getRepositoriesJson(zkClient);
+      System.out.println(existingRepositoriesJson);
+
+      List repos = new ObjectMapper().readValue(existingRepositoriesJson, List.class);
+      repos.add(new SolrPackageRepository(name, uri));
+      if (zkClient.exists("/repositories.json", true) == false) {
+        zkClient.create("/repositories.json", new ObjectMapper().writeValueAsString(repos).getBytes(), CreateMode.PERSISTENT, true);
+      } else {
+        zkClient.setData("/repositories.json", new ObjectMapper().writeValueAsString(repos).getBytes(), true);
+      }
+
+      if (zkClient.exists("/keys", true)==false) zkClient.create("/keys", new byte[0], CreateMode.PERSISTENT, true);
+      if (zkClient.exists("/keys/exe", true)==false) zkClient.create("/keys/exe", new byte[0], CreateMode.PERSISTENT, true);
+      if (zkClient.exists("/keys/exe/"+"pub_key.der", true)==false) zkClient.create("/keys/exe/"+"pub_key.der", new byte[0], CreateMode.PERSISTENT, true);
+      zkClient.setData("/keys/exe/"+"pub_key.der", IOUtils.toByteArray(new URL(uri+"/publickey.der").openStream()), true);
+
+      System.out.println("Added repository: "+name);
+      System.out.println(getRepositoriesJson(zkClient));
+    }
+  }
+
+  protected String getRepositoriesJson(SolrZkClient zkClient) throws UnsupportedEncodingException, KeeperException, InterruptedException {
+    if (zkClient.exists("/repositories.json", true)) {
+      return new String(zkClient.getData("/repositories.json", null, null, true), "UTF-8");
+    }
+    return "[]";
+  }
+
+  protected void list(List args) {
+    for (SolrPackageInstance pkg: packageManager.getPackages()) {
+      System.out.println(pkg.getPluginId()+" ("+pkg.getVersion()+")");
+    }
+  }
+  protected void available(List args) throws PackageManagerException {
+    System.out.println("Available packages:\n-----");
+    for (SolrPackage plugin: updateManager.getPackages()) {
+      System.out.println(plugin.id + " \t\t"+plugin.description);
+      for (SolrPackageRelease version: plugin.versions) {
+        System.out.println("\tVersion: "+version.version);
+      }
+    }
+  }
+  protected void install(List args) throws PackageManagerException {
+    updateManager.installPackage(args.get(0).toString(), args.get(1).toString());
+    System.out.println(args.get(0).toString() + " installed.");
+  }
+  protected void deploy(String packageName,
+      String collections[], String parameters[]) throws PackageManagerException {
+    
+    System.out.println(packageManager.deployInstallPackage(packageName.split(":")[0], packageName.split(":").length==2? packageName.split(":")[1]: "latest",
+        Arrays.asList(collections), parameters));
+  }
+
+  protected void redeploy(List args) throws PackageManagerException {
+    throw new UnsupportedOperationException("redeploy not supported");
+    //System.out.println(packageManager.deployUpdatePackage(args.get(0).toString(), args.subList(1, args.size())));
+  }
+
+  protected void update() throws PackageManagerException {
+    if (updateManager.hasUpdates()) {
+      System.out.println("Available updates:\n-----");
+
+      for (SolrPackage plugin: updateManager.getUpdates()) {
+        System.out.println(plugin.id + " \t\t"+plugin.description);
+        for (SolrPackageRelease version: plugin.versions) {
+          System.out.println("\tVersion: "+version.version);
+        }
+      }
+    } else {
+      System.out.println("No updates found. System is up to date.");
+    }
+  }
+
+  protected void updatePackage(String zkHost, String packageName, List args) throws PackageManagerException {
+    if (updateManager.hasUpdates()) {
+      String latestVersion = updateManager.getLastPackageRelease(packageName).version;
+      SolrPackageInstance installedPackage = packageManager.getPackage(packageName, "latest");
+      System.out.println("Updating ["+packageName+"] from " + installedPackage.getVersion() + " to version "+latestVersion);
+
+      List<String> collectionsDeployedIn = getDeployedCollections(zkHost, packageManager, installedPackage);
+      System.out.println("Already deployed on collections: "+collectionsDeployedIn);
+      updateManager.updatePackage(packageName, latestVersion);
+
+      SolrPackageInstance updatedPackage = packageManager.getPackage(packageName, "latest");
+      boolean res = packageManager.verify(updatedPackage, collectionsDeployedIn);
+      System.out.println("Verifying version "+updatedPackage.getVersion()+" on "+collectionsDeployedIn
+          +", result: "+res);
+      if (!res) throw new PackageManagerException("Failed verification after deployment");
+    } else {
+      System.out.println("Package "+packageName+" is already up to date.");
+    }
+  }
+
+  private List<String> getDeployedCollections(String zkHost, SolrPackageManager packageManager, SolrPackageInstance pkg) {
+
+    List<String> allCollections;
+    try (SolrZkClient zkClient = new SolrZkClient(zkHost, 30000)) {
+      allCollections = zkClient.getChildren("/collections", null, true);
+    } catch (KeeperException | InterruptedException e) {
+      throw new RuntimeException(e);
+    }
+    System.out.println("Checking if these collections have the plugin deployed: "+ allCollections);
+    List<String> deployed = new ArrayList<String>();
+    for (String collection: allCollections) {
+      if (packageManager.verify(pkg, Collections.singletonList(collection))) {
+        deployed.add(collection);
+      }
+    }
+    return deployed;
+  }
+
+  @SuppressWarnings("static-access")
+  public Option[] getOptions() {
+    return new Option[] {
+        OptionBuilder
+        .withArgName("URL")
+        .hasArg()
+        .isRequired(true)
+        .withDescription("Address of the Solr Web application, defaults to: "+SolrCLI.DEFAULT_SOLR_URL)
+        .create("solrUrl"),
+
+        OptionBuilder
+        .withArgName("COLLECTIONS")
+        .hasArgs()
+        .isRequired(false)
+        .withDescription("List of collections on which the package is to be deployed")
+        .create("collections"),
+
+        OptionBuilder
+        .withArgName("PARAMS")
+        .hasArgs()
+        .isRequired(false)
+        .withDescription("List of parameters to use while deploying the package")
+        .withLongOpt("param")
+        .create("p"),
+
+    };
+  }
+
+  private String getZkHost(CommandLine cli) throws Exception {
+    String zkHost = cli.getOptionValue("zkHost");
+    if (zkHost != null)
+      return zkHost;
+
+    // find it using the localPort
+
+    String systemInfoUrl = solrUrl+"/admin/info/system";
+    CloseableHttpClient httpClient = SolrCLI.getHttpClient();
+    try {
+      // hit Solr to get system info
+      Map<String,Object> systemInfo = SolrCLI.getJson(httpClient, systemInfoUrl, 2, true);
+
+      // convert raw JSON into user-friendly output
+      StatusTool statusTool = new StatusTool();
+      Map<String,Object> status = statusTool.reportStatus(solrUrl+"/", systemInfo, httpClient);
+      Map<String,Object> cloud = (Map<String, Object>)status.get("cloud");
+      if (cloud != null) {
+        String zookeeper = (String) cloud.get("ZooKeeper");
+        if (zookeeper.endsWith("(embedded)")) {
+          zookeeper = zookeeper.substring(0, zookeeper.length() - "(embedded)".length());
+        }
+        zkHost = zookeeper;
+      }
+    } finally {
+      HttpClientUtil.close(httpClient);
+    }
+
+    return zkHost;
+  }
+}
\ No newline at end of file
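
Assuming the tool is wired in through bin/solr (see the SolrCLI change below), a typical
session over these subcommands might look like the following sketch (repository URL,
package name, collection and parameter mirror the test added in this commit):

    bin/solr package -solrUrl http://localhost:8983/solr add-repo fullstory http://localhost:8081
    bin/solr package -solrUrl http://localhost:8983/solr list-available
    bin/solr package -solrUrl http://localhost:8983/solr install question-answer 1.0.0
    bin/solr package -solrUrl http://localhost:8983/solr deploy question-answer -collections abc -p RH-HANDLER-PATH=/mypath2
    bin/solr package -solrUrl http://localhost:8983/solr update question-answer
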
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 6374b83..3eddf06 100755
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -418,6 +418,8 @@ public class SolrCLI implements CLIO {
       return new AutoscalingTool();
     else if ("export".equals(toolType))
       return new ExportTool();
+    else if ("package".equals(toolType))
+      return new PackageTool();
 
     // If you add a built-in tool to this class, add it here to avoid
     // classpath scanning
diff --git a/solr/core/src/test/org/apache/solr/cloud/PackageManagerCLITest.java b/solr/core/src/test/org/apache/solr/cloud/PackageManagerCLITest.java
new file mode 100644
index 0000000..99286df
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/PackageManagerCLITest.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import java.nio.file.Path;
+
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.util.PackageTool;
+import org.apache.solr.util.SolrCLI;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class PackageManagerCLITest extends SolrCloudTestCase {
+
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+    System.setProperty("enable.packages", "true");
+
+    configureCluster(1)
+        .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
+        .configure();
+  }
+
+  @Test
+  public void testPackageManager() throws Exception {
+    // Upload a configset, using a full, explicit path.
+
+    Path configSet = TEST_PATH().resolve("configsets");
+    AbstractDistribZkTestBase.copyConfigUp(configSet, "cloud-subdirs", "upconfig1", cluster.getZkServer().getZkAddress());
+
+    PackageTool tool = new PackageTool();
+    String solrUrl = cluster.getJettySolrRunner(0).getBaseUrl().toString();
+    int res = run(tool, new String[] {"-solrUrl", solrUrl, "list"});
+    assertEquals("tool should have returned 0 for success ", 0, res);
+    
+    res = run(tool, new String[] {"-solrUrl", solrUrl, "add-repo", "fullstory",  "http://localhost:8081"});
+    assertEquals("tool should have returned 0 for success ", 0, res);
+
+    res = run(tool, new String[] {"-solrUrl", solrUrl, "list-available"});
+    assertEquals("tool should have returned 0 for success ", 0, res);
+
+    res = run(tool, new String[] {"-solrUrl", solrUrl, "install", "question-answer", "1.0.0"}); // nocommit (change to pkg:ver syntax)
+    assertEquals("tool should have returned 0 for success ", 0, res);
+    
+    res = run(tool, new String[] {"-solrUrl", solrUrl, "list"});
+    assertEquals("tool should have returned 0 for success ", 0, res);
+
+    CollectionAdminRequest
+      .createCollection("abc", "conf1", 2, 1)
+      .setMaxShardsPerNode(100)
+      .process(cluster.getSolrClient());
+
+    res = run(tool, new String[] {"-solrUrl", solrUrl, "deploy", "question-answer", "-collections", "abc", "-p", "RH-HANDLER-PATH=/mypath2"});
+    assertEquals("tool should have returned 0 for success ", 0, res);
+    
+    res = run(tool, new String[] {"-solrUrl", solrUrl, "update", "question-answer"});
+    assertEquals("tool should have returned 0 for success ", 0, res);
+
+  }
+
+  private int run(PackageTool tool, String[] args) throws Exception {
+    int res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args));
+    return res;
+  }
+}


[lucene-solr] 09/10: Merging master

Posted by is...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ishan pushed a commit to branch jira/solr-13662-2
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 3c81f1579e2f063b86dceffac8a48c98c0cc7d20
Author: Ishan Chattopadhyaya <is...@apache.org>
AuthorDate: Tue Oct 29 04:13:21 2019 +0530

    Merging master
---
 .asf.yaml                                          |   19 +
 dev-tools/scripts/jenkins.build.ref.guide.sh       |    9 +-
 lucene/CHANGES.txt                                 |   18 +-
 .../miscellaneous/WordDelimiterGraphFilter.java    |   14 +-
 .../TestWordDelimiterGraphFilter.java              |   31 +
 .../src/java/org/apache/lucene/geo/Polygon2D.java  |    9 +-
 .../org/apache/lucene/search/BooleanQuery.java     |   21 +-
 .../apache/lucene/search/BottomValueChecker.java   |   52 -
 .../org/apache/lucene/search/IndexSearcher.java    |   10 +-
 .../apache/lucene/search/MaxScoreAccumulator.java  |   90 ++
 .../apache/lucene/search/TopFieldCollector.java    |   97 +-
 .../apache/lucene/search/TopScoreDocCollector.java |  103 +-
 .../java/org/apache/lucene/util/bkd/BKDReader.java |  236 ++++-
 .../lucene/search/TestMaxScoreAccumulator.java     |   57 +
 .../org/apache/lucene/search/TestQueryVisitor.java |   18 +
 .../apache/lucene/search/TestTopDocsCollector.java |  204 +++-
 .../lucene/search/TestTopFieldCollector.java       |  195 ++++
 .../test/org/apache/lucene/util/bkd/TestBKD.java   |   18 +-
 .../lucene/search/grouping/TopGroupsTest.java      |  235 +++++
 lucene/ivy-versions.properties                     |    7 +-
 .../search/join/GlobalOrdinalsWithScoreQuery.java  |    7 +
 .../src/java/org/apache/lucene/geo/Line2D.java     |   12 +-
 .../lucene/analysis/BaseTokenStreamTestCase.java   |   17 +-
 solr/CHANGES.txt                                   |   45 +-
 .../solr/handler/dataimport/DataImportHandler.java |    8 +-
 .../apache/solr/ltr/model/DefaultWrapperModel.java |    2 +-
 .../src/java/org/apache/solr/api/AnnotatedApi.java |    1 +
 .../src/java/org/apache/solr/cloud/CloudUtil.java  |    6 +-
 .../org/apache/solr/cloud/ReplicateFromLeader.java |    2 +-
 .../solr/cloud/api/collections/MoveReplicaCmd.java |    8 +-
 .../solr/cloud/autoscaling/AutoScalingHandler.java |    2 +-
 .../solr/cloud/autoscaling/ScheduledTriggers.java  |   61 ++
 .../cloud/autoscaling/sim/FakeDocIterator.java     |    0
 .../cloud/autoscaling/sim/SimCloudManager.java     |    7 +-
 .../autoscaling/sim/SimClusterStateProvider.java   |   13 +
 .../solr/cloud/autoscaling/sim/SimScenario.java    | 1091 ++++++++++++++++++++
 .../solr/cloud/autoscaling/sim/SimUtils.java       |   16 +-
 .../java/org/apache/solr/core/CoreContainer.java   |  107 +-
 .../org/apache/solr/core/HdfsDirectoryFactory.java |   15 +-
 .../src/java/org/apache/solr/core/PluginBag.java   |   33 +-
 .../src/java/org/apache/solr/core/PluginInfo.java  |    5 +
 .../src/java/org/apache/solr/core/SolrCore.java    |   71 +-
 .../java/org/apache/solr/core/SolrInfoBean.java    |    9 +-
 .../org/apache/solr/core/SolrResourceLoader.java   |    5 +-
 .../apache/solr/filestore/DistribPackageStore.java |   17 +-
 .../org/apache/solr/filestore/PackageStoreAPI.java |    2 +-
 .../apache/solr/handler/ReplicationHandler.java    |   47 +-
 .../apache/solr/handler/RequestHandlerBase.java    |   49 +-
 .../org/apache/solr/handler/SolrConfigHandler.java |   21 +-
 .../solr/handler/admin/CoreAdminHandler.java       |    9 +-
 .../solr/handler/admin/CoreAdminOperation.java     |    2 +
 .../solr/handler/component/ExpandComponent.java    |   14 +-
 .../handler/component/HttpShardHandlerFactory.java |   19 +-
 .../solr/handler/component/SearchHandler.java      |   60 +-
 .../solr/handler/component/SuggestComponent.java   |   23 +-
 .../solr/handler/component/TermsComponent.java     |   17 +-
 .../solr/highlight/HighlightingPluginBase.java     |   19 +-
 .../apache/solr/metrics/SolrCoreMetricManager.java |   51 +-
 .../org/apache/solr/metrics/SolrMetricManager.java |   28 +-
 .../apache/solr/metrics/SolrMetricProducer.java    |   62 +-
 .../apache/solr/metrics/SolrMetricsContext.java    |  114 ++
 .../src/java/org/apache/solr/pkg/PackageAPI.java   |   22 +-
 .../java/org/apache/solr/pkg/PackageListeners.java |    5 +-
 .../java/org/apache/solr/pkg/PackageLoader.java    |   33 +-
 .../org/apache/solr/request/json/RequestUtil.java  |    2 +-
 .../apache/solr/schema/JsonPreAnalyzedParser.java  |    2 +-
 .../java/org/apache/solr/search/CaffeineCache.java |   18 +-
 .../java/org/apache/solr/search/FastLRUCache.java  |   55 +-
 .../src/java/org/apache/solr/search/LFUCache.java  |   27 +-
 .../src/java/org/apache/solr/search/LRUCache.java  |   37 +-
 .../java/org/apache/solr/search/PointMerger.java   |    4 +
 .../src/java/org/apache/solr/search/SolrCache.java |    4 +-
 .../org/apache/solr/search/SolrCacheHolder.java    |   12 +-
 .../org/apache/solr/search/SolrFieldCacheBean.java |   16 +-
 .../org/apache/solr/search/SolrIndexSearcher.java  |   69 +-
 .../distributed/command/GroupConverter.java        |   13 +-
 .../java/org/apache/solr/security/AuditEvent.java  |   22 +-
 .../apache/solr/security/AuditLoggerPlugin.java    |   45 +-
 .../apache/solr/security/AuthenticationPlugin.java |   61 +-
 .../solr/security/MultiDestinationAuditLogger.java |   10 +-
 .../java/org/apache/solr/servlet/HttpSolrCall.java |   26 +-
 .../apache/solr/servlet/SolrDispatchFilter.java    |    3 +-
 .../org/apache/solr/store/blockcache/Metrics.java  |   20 +-
 .../solr/store/hdfs/HdfsLocalityReporter.java      |   21 +-
 .../apache/solr/update/DirectUpdateHandler2.java   |   67 +-
 .../src/java/org/apache/solr/update/PeerSync.java  |   10 +-
 .../org/apache/solr/update/PeerSyncWithLeader.java |   10 +-
 .../org/apache/solr/update/SolrIndexWriter.java    |   38 +-
 .../java/org/apache/solr/update/UpdateHandler.java |    6 -
 .../src/java/org/apache/solr/update/UpdateLog.java |   24 +-
 .../org/apache/solr/update/UpdateShardHandler.java |   25 +-
 .../processor/DistributedUpdateProcessor.java      |   12 +-
 .../processor/DistributedZkUpdateProcessor.java    |   12 +-
 .../processor/UpdateRequestProcessorChain.java     |   43 +-
 .../src/java/org/apache/solr/util/SolrCLI.java     |   17 +-
 .../stats/InstrumentedHttpListenerFactory.java     |   17 +-
 .../stats/InstrumentedHttpRequestExecutor.java     |   22 +-
 ...rumentedPoolingHttpClientConnectionManager.java |   30 +-
 .../test-files/solr/collection1/conf/schema.xml    |    5 +
 .../solr/security/auditlog_plugin_security.json    |    4 +-
 .../src/test/org/apache/solr/SolrInfoBeanTest.java |    4 +-
 .../org/apache/solr/TestDistributedGrouping.java   |    7 +-
 .../test/org/apache/solr/TestGroupingSearch.java   |   25 +
 .../org/apache/solr/cloud/MoveReplicaTest.java     |   49 +-
 .../apache/solr/cloud/TestLRUStatsCacheCloud.java  |    3 +-
 .../solr/cloud/TestQueryingOnDownCollection.java   |    8 -
 .../org/apache/solr/cloud/TestTlogReplica.java     |  205 ++--
 .../AutoAddReplicasIntegrationTest.java            |    2 +-
 .../autoscaling/AutoAddReplicasPlanActionTest.java |    2 +-
 .../cloud/autoscaling/sim/TestSimScenario.java     |  138 +++
 .../autoscaling/sim/TestSnapshotCloudManager.java  |   10 +-
 .../apache/solr/core/HdfsDirectoryFactoryTest.java |    3 +-
 .../test/org/apache/solr/core/MockInfoBean.java    |   19 +-
 .../core/MockQuerySenderListenerReqHandler.java    |    8 +-
 .../apache/solr/core/TestSolrConfigHandler.java    |    2 +-
 .../solr/filestore/TestDistribPackageStore.java    |    7 +-
 .../org/apache/solr/handler/TestReqParamsAPI.java  |   10 +
 .../solr/handler/TestSolrConfigHandlerCloud.java   |    9 +
 .../solr/handler/admin/MetricsHandlerTest.java     |  146 ++-
 .../handler/admin/MetricsHistoryHandlerTest.java   |    7 +-
 .../component/DistributedExpandComponentTest.java  |    4 +-
 .../component/DistributedTermsComponentTest.java   |   26 +-
 .../solr/handler/component/TermsComponentTest.java |  102 +-
 .../handler/component/TestExpandComponent.java     |  147 ++-
 .../apache/solr/metrics/SolrMetricTestUtils.java   |    4 +-
 .../src/test/org/apache/solr/pkg/TestPackages.java |   10 +-
 .../org/apache/solr/search/TestCaffeineCache.java  |    7 +-
 .../org/apache/solr/search/TestFastLRUCache.java   |   40 +-
 .../test/org/apache/solr/search/TestLFUCache.java  |   16 +-
 .../test/org/apache/solr/search/TestLRUCache.java  |   22 +-
 .../apache/solr/search/TestSolrFieldCacheBean.java |    7 +-
 .../solr/search/facet/TestJsonFacetRefinement.java |    4 +-
 .../apache/solr/search/facet/TestJsonFacets.java   |    9 +-
 .../solr/search/join/TestCloudNestedDocsSort.java  |   16 +-
 .../solr/security/AuditLoggerIntegrationTest.java  |  413 +++++---
 .../solr/security/BasicAuthIntegrationTest.java    |    6 +-
 .../solr/security/BasicAuthOnSingleNodeTest.java   |   12 +-
 .../solr/security/CallbackAuditLoggerPlugin.java   |   29 +-
 .../solr/store/blockcache/BufferStoreTest.java     |    4 +-
 .../apache/solr/update/processor/RuntimeUrp.java   |    2 +-
 .../asciidoctor-ant-1.6.0-alpha.5.jar.sha1         |    1 -
 solr/licenses/asciidoctor-ant-1.6.2.jar.sha1       |    1 +
 solr/solr-ref-guide/README.adoc                    |   26 +-
 solr/solr-ref-guide/src/_config.yml.template       |    4 +-
 .../adding-custom-plugins-in-solrcloud-mode.adoc   |   13 +-
 solr/solr-ref-guide/src/aliases.adoc               |    6 +-
 .../authentication-and-authorization-plugins.adoc  |   10 +-
 .../src/basic-authentication-plugin.adoc           |    2 +-
 .../src/cluster-node-management.adoc               |    2 +-
 .../src/common-query-parameters.adoc               |    2 +-
 solr/solr-ref-guide/src/enabling-ssl.adoc          |    2 +-
 .../src/jwt-authentication-plugin.adoc             |    2 +-
 .../src/kerberos-authentication-plugin.adoc        |    2 +-
 solr/solr-ref-guide/src/metrics-reporting.adoc     |    6 +-
 ...onitoring-solr-with-prometheus-and-grafana.adoc |    6 +-
 .../src/performance-statistics-reference.adoc      |    2 +-
 .../src/query-settings-in-solrconfig.adoc          |    3 +-
 .../solr-ref-guide/src/request-parameters-api.adoc |   29 +-
 .../src/resource-and-plugin-loading.adoc           |    2 +-
 .../src/rule-based-authorization-plugin.adoc       |    2 +-
 .../setting-up-an-external-zookeeper-ensemble.adoc |    6 +-
 .../src/solr-control-script-reference.adoc         |   14 +-
 solr/solr-ref-guide/src/solr-tracing.adoc          |    2 +-
 solr/solr-ref-guide/src/solr-tutorial.adoc         |    2 +-
 solr/solr-ref-guide/src/solr-upgrade-notes.adoc    |   65 ++
 .../src/solrcloud-autoscaling-api.adoc             |   12 +-
 .../solrcloud-autoscaling-policy-preferences.adoc  |  143 ++-
 .../src/solrcloud-autoscaling-triggers.adoc        |   10 +-
 .../src/taking-solr-to-production.adoc             |   18 +-
 .../src/updating-parts-of-documents.adoc           |    2 +-
 ...ding-data-with-solr-cell-using-apache-tika.adoc |    6 +-
 ...ng-zookeeper-to-manage-configuration-files.adoc |    2 +-
 solr/solrj/ivy.xml                                 |    2 +
 .../solrj/impl/SolrClientNodeStateProvider.java    |    2 +-
 .../solr/client/solrj/request/beans/Package.java}  |   32 +-
 .../client/solrj/request/beans/pakage-info.java}   |   17 +-
 .../apache/solr/common/util/CommandOperation.java  |    5 +-
 .../org/apache/solr/common/util/ExecutorUtil.java  |    1 +
 .../apache/solr/common}/util/ReflectMapWriter.java |    4 +-
 .../java/org/apache/solr/common/util/StrUtils.java |    2 +-
 .../java/org/apache/solr/common/util/Utils.java    |   35 +-
 solr/solrj/src/java/org/noggit/ObjectBuilder.java  |   43 +-
 .../apispec/core.config.Params.Commands.json       |    2 +-
 .../apache/solr/client/solrj/SolrExampleTests.java |   39 +
 .../client/solrj/cloud/autoscaling/TestPolicy.java |   39 +-
 .../solr/client/solrj/request/TestV2Request.java   |   18 +-
 .../src/test/org/noggit/TestObjectBuilder.java     |   33 +-
 187 files changed, 5078 insertions(+), 1554 deletions(-)

diff --git a/.asf.yaml b/.asf.yaml
new file mode 100644
index 0000000..f47b523
--- /dev/null
+++ b/.asf.yaml
@@ -0,0 +1,19 @@
+# https://cwiki.apache.org/confluence/display/INFRA/.asf.yaml+features+for+git+repositories
+
+github:
+  description: "Apache Lucene and Solr open-source search software"
+  homepage: https://lucene.apache.org/
+  labels:
+    - lucene
+    - solr
+    - search
+    - nosql
+    - java
+    - backend
+    - search-engine
+    - information-retrieval
+
+  enabled_merge_buttons:
+    squash: true
+    merge:  false
+    rebase: false
\ No newline at end of file
diff --git a/dev-tools/scripts/jenkins.build.ref.guide.sh b/dev-tools/scripts/jenkins.build.ref.guide.sh
index f1a8bdb..c3203ee 100755
--- a/dev-tools/scripts/jenkins.build.ref.guide.sh
+++ b/dev-tools/scripts/jenkins.build.ref.guide.sh
@@ -61,10 +61,11 @@ echoRun "rvm $RUBY_VERSION@$GEMSET"      # Activate this project's gemset
 # Install gems in the gemset.  Param --force disables dependency conflict detection.
 echoRun "gem install --force --version 3.5.0 jekyll"
 echoRun "gem uninstall --all --ignore-dependencies asciidoctor"  # Get rid of all versions
-echoRun "gem install --force --version 1.5.6.2 asciidoctor"
-echoRun "gem install --force --version 2.1.0 jekyll-asciidoc"
-echoRun "gem install --force --version 1.1.2 pygments.rb"
-echoRun "gem install --force --version 3.0.9 slim"
+echoRun "gem install --force --version 2.0.10 asciidoctor"
+echoRun "gem install --force --version 3.0.0 jekyll-asciidoc"
+echoRun "gem install --force --version 4.0.1 slim"
+echoRun "gem install --force --version 2.0.10 tilt"
+echoRun "gem install --force --version 1.1.5 concurrent-ruby"
 
 cd solr/solr-ref-guide
 
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 6fea135..3948ee5 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -75,8 +75,9 @@ New Features
 (No changes)
 
 Improvements
----------------------
-(No changes)
+
+* LUCENE-9006: WordDelimiterGraphFilter's catenateAll token is now ordered before any token parts, like WDF did.
+  (David Smiley)
 
 Optimizations
 
@@ -89,6 +90,12 @@ Optimizations
   documents, since computing the number of live documents is an expensive
   operation. (Adrien Grand)
 
+* LUCENE-8992: TopFieldCollector and TopScoreDocCollector can now share minimum scores across leaves
+  concurrently. (Adrien Grand, Atri Sharma, Jim Ferenczi)
+
+* LUCENE-8932: BKDReader's index is now stored off-heap when the IndexInput is
+  an instance of ByteBufferIndexInput. (Jack Conradson via Adrien Grand)
+
 Bug Fixes
 
 * LUCENE-9001: Fix race condition in SetOnce. (Przemko Robakowski)
@@ -217,6 +224,12 @@ Bug Fixes
 * LUCENE-8755: spatial-extras quad and packed quad prefix trees could throw a
   NullPointerException for certain cell edge coordinates (Chongchen Chen, David Smiley)
 
+* LUCENE-9005: BooleanQuery.visit() would pull subVisitors from its parent visitor, rather
+  than from a visitor for its own specific query.  This could cause problems when BQ was
+  nested under another BQ. Instead, we now pull a MUST subvisitor, pass it to any MUST
+  subclauses, and then pull SHOULD, MUST_NOT and FILTER visitors from it rather than from
+  the parent.  (Alan Woodward)
+
 Other
 
 * LUCENE-8778 LUCENE-8911 LUCENE-8957: Define analyzer SPI names as static final fields and document the names in Javadocs.
@@ -234,6 +247,7 @@ Other
 * LUCENE-8999: LuceneTestCase.expectThrows now propogates assert/assumption failures up to the test
   w/o wrapping in a new assertion failure unless the caller has explicitly expected them (hossman)
 
+* LUCENE-8062: GlobalOrdinalsWithScoreQuery is no longer eligible for query caching. (Jim Ferenczi)
 
 ======================= Lucene 8.2.0 =======================
 
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterGraphFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterGraphFilter.java
index a04eaff..9d03c7e 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterGraphFilter.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterGraphFilter.java
@@ -447,7 +447,7 @@ public final class WordDelimiterGraphFilter extends TokenFilter {
   private class PositionSorter extends InPlaceMergeSorter {
     @Override
     protected int compare(int i, int j) {
-      // sort by smaller start position
+      // smaller start position
       int iPosStart = bufferedParts[4*i];
       int jPosStart = bufferedParts[4*j];
       int cmp = Integer.compare(iPosStart, jPosStart);
@@ -455,10 +455,18 @@ public final class WordDelimiterGraphFilter extends TokenFilter {
         return cmp;
       }
 
-      // tie break by longest pos length:
+      // longest pos length:
       int iPosEnd = bufferedParts[4*i+1];
       int jPosEnd = bufferedParts[4*j+1];
-      return Integer.compare(jPosEnd, iPosEnd);
+      cmp = Integer.compare(jPosEnd, iPosEnd);
+      if (cmp != 0) {
+        return cmp;
+      }
+
+      // smaller start offset
+      int iOff = bufferedParts[4*i + 2];
+      int jOff = bufferedParts[4*j + 2];
+      return Integer.compare(iOff, jOff);
     }
 
     @Override
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterGraphFilter.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterGraphFilter.java
index 41109b8..67b8035 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterGraphFilter.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterGraphFilter.java
@@ -397,6 +397,34 @@ public class TestWordDelimiterGraphFilter extends BaseTokenStreamTestCase {
     a.close();
   }
 
+  // https://issues.apache.org/jira/browse/LUCENE-9006
+  public void testCatenateAllEmittedBeforeParts() throws Exception {
+    // no number parts
+    final int flags = PRESERVE_ORIGINAL | GENERATE_WORD_PARTS | CATENATE_ALL;
+
+    //not using getAnalyzer because we want adjustInternalOffsets=true
+    Analyzer a = new Analyzer() {
+      @Override
+      public TokenStreamComponents createComponents(String field) {
+        Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
+        return new TokenStreamComponents(tokenizer, new WordDelimiterGraphFilter(tokenizer, true, DEFAULT_WORD_DELIM_TABLE, flags, null));
+      }
+    };
+
+    // input starts with a number, but we don't generate numbers.
+    //   Nonetheless preserve-original and concatenate-all show up first.
+    assertTokenStreamContents(a.tokenStream("dummy", "8-other"),
+        new String[] { "8-other", "8other", "other" }, new int[]{0, 0, 2}, new int[]{7, 7, 7});
+
+    boolean useCharFilter = true;
+    boolean graphOffsetsAreCorrect = false; // note: could solve via always incrementing wordPos on first word ('8')
+    checkAnalysisConsistency(random(), a, useCharFilter, "8-other", graphOffsetsAreCorrect);
+
+    verify("8-other", flags); // uses getAnalyzer which uses adjustInternalOffsets=false which works
+
+    a.close();
+  }
+
   /** concat numbers + words + all */
   public void testLotsOfConcatenating() throws Exception {
     final int flags = GENERATE_WORD_PARTS | GENERATE_NUMBER_PARTS | CATENATE_WORDS | CATENATE_NUMBERS | CATENATE_ALL | SPLIT_ON_CASE_CHANGE | SPLIT_ON_NUMERICS | STEM_ENGLISH_POSSESSIVE;    
@@ -947,6 +975,9 @@ public class TestWordDelimiterGraphFilter extends BaseTokenStreamTestCase {
 
       fail(b.toString());
     }
+
+    boolean useCharFilter = true;
+    checkAnalysisConsistency(random(), getAnalyzer(flags), useCharFilter, text);
   }
 
   public void testOnlyNumbers() throws Exception {
diff --git a/lucene/core/src/java/org/apache/lucene/geo/Polygon2D.java b/lucene/core/src/java/org/apache/lucene/geo/Polygon2D.java
index 3b06d21..cf06ebd 100644
--- a/lucene/core/src/java/org/apache/lucene/geo/Polygon2D.java
+++ b/lucene/core/src/java/org/apache/lucene/geo/Polygon2D.java
@@ -156,12 +156,15 @@ public class Polygon2D implements Component2D {
     if (ax == bx && bx == cx && ay == by && by == cy) {
       // indexed "triangle" is a point: shortcut by checking contains
       return internalContains(ax, ay) ? Relation.CELL_INSIDE_QUERY : Relation.CELL_OUTSIDE_QUERY;
-    } else if ((ax == cx && ay == cy) || (bx == cx && by == cy)) {
+    } else if (ax == cx && ay == cy) {
       // indexed "triangle" is a line segment: shortcut by calling appropriate method
       return relateIndexedLineSegment(minX, maxX, minY, maxY, ax, ay, bx, by);
-    } else if ((ax == bx && ay == by)) {
+    } else if (ax == bx && ay == by) {
       // indexed "triangle" is a line segment: shortcut by calling appropriate method
-      return relateIndexedLineSegment(minX, maxX, minY, maxY, ax, ay, cx, cy);
+      return relateIndexedLineSegment(minX, maxX, minY, maxY, bx, by, cx, cy);
+    } else if (bx == cx && by == cy) {
+      // indexed "triangle" is a line segment: shortcut by calling appropriate method
+      return relateIndexedLineSegment(minX, maxX, minY, maxY, cx, cy, ax, ay);
     }
     // indexed "triangle" is a triangle:
     return relateIndexedTriangle(minX, maxX, minY, maxY, ax, ay, bx, by, cx, cy);
diff --git a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
index def2455..2e2d81b 100644
--- a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
@@ -506,14 +506,19 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
 
   @Override
   public void visit(QueryVisitor visitor) {
-    for (Map.Entry<Occur, Collection<Query>> entry : clauseSets.entrySet()) {
-      Occur clauseOccur = entry.getKey();
-      Collection<Query> clauseQueries = entry.getValue();
-
-      if (clauseQueries.size() > 0) {
-        QueryVisitor v = visitor.getSubVisitor(clauseOccur, this);
-        for (Query q : clauseQueries) {
-          q.visit(v);
+    QueryVisitor sub = visitor.getSubVisitor(Occur.MUST, this);
+    for (BooleanClause.Occur occur : clauseSets.keySet()) {
+      if (clauseSets.get(occur).size() > 0) {
+        if (occur == Occur.MUST) {
+          for (Query q : clauseSets.get(occur)) {
+            q.visit(sub);
+          }
+        }
+        else {
+          QueryVisitor v = sub.getSubVisitor(occur, this);
+          for (Query q : clauseSets.get(occur)) {
+            q.visit(v);
+          }
         }
       }
     }
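
The practical effect: MUST subclauses of a nested BooleanQuery now report into a visitor
derived from the query itself rather than from its parent, so term collection over nested
queries stays correct. A sketch (imports elided):

    Set<Term> terms = new HashSet<>();
    Query inner = new BooleanQuery.Builder()
        .add(new TermQuery(new Term("f", "b")), BooleanClause.Occur.MUST)
        .build();
    Query outer = new BooleanQuery.Builder()
        .add(new TermQuery(new Term("f", "a")), BooleanClause.Occur.MUST)
        .add(inner, BooleanClause.Occur.MUST)
        .build();
    outer.visit(QueryVisitor.termCollector(terms)); // collects both "a" and "b"
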
diff --git a/lucene/core/src/java/org/apache/lucene/search/BottomValueChecker.java b/lucene/core/src/java/org/apache/lucene/search/BottomValueChecker.java
deleted file mode 100644
index 174de35..0000000
--- a/lucene/core/src/java/org/apache/lucene/search/BottomValueChecker.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.lucene.search;
-
-/**
- * Maintains the bottom value across multiple collectors
- */
-abstract class BottomValueChecker {
-  /** Maintains global bottom score as the maximum of all bottom scores */
-  private static class MaximumBottomScoreChecker extends BottomValueChecker {
-    private volatile float maxMinScore;
-
-    @Override
-    public void updateThreadLocalBottomValue(float value) {
-      if (value <= maxMinScore) {
-        return;
-      }
-      synchronized (this) {
-        if (value > maxMinScore) {
-          maxMinScore = value;
-        }
-      }
-    }
-
-    @Override
-    public float getBottomValue() {
-      return maxMinScore;
-    }
-  }
-
-  public static BottomValueChecker createMaxBottomScoreChecker() {
-    return new MaximumBottomScoreChecker();
-  }
-
-  public abstract void updateThreadLocalBottomValue(float value);
-  public abstract float getBottomValue();
-}
\ No newline at end of file
diff --git a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
index e0f0cdf..2c2d669 100644
--- a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
+++ b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
@@ -470,11 +470,11 @@ public class IndexSearcher {
       private final HitsThresholdChecker hitsThresholdChecker = (executor == null || leafSlices.length <= 1) ? HitsThresholdChecker.create(TOTAL_HITS_THRESHOLD) :
           HitsThresholdChecker.createShared(TOTAL_HITS_THRESHOLD);
 
-      private final BottomValueChecker bottomValueChecker = BottomValueChecker.createMaxBottomScoreChecker();
+      private final MaxScoreAccumulator minScoreAcc = (executor == null || leafSlices.length <= 1) ? null : new MaxScoreAccumulator();
 
       @Override
       public TopScoreDocCollector newCollector() throws IOException {
-        return TopScoreDocCollector.create(cappedNumHits, after, hitsThresholdChecker, bottomValueChecker);
+        return TopScoreDocCollector.create(cappedNumHits, after, hitsThresholdChecker, minScoreAcc);
       }
 
       @Override
@@ -598,15 +598,17 @@ public class IndexSearcher {
     final int cappedNumHits = Math.min(numHits, limit);
     final Sort rewrittenSort = sort.rewrite(this);
 
-    final CollectorManager<TopFieldCollector, TopFieldDocs> manager = new CollectorManager<TopFieldCollector, TopFieldDocs>() {
+    final CollectorManager<TopFieldCollector, TopFieldDocs> manager = new CollectorManager<>() {
 
       private final HitsThresholdChecker hitsThresholdChecker = (executor == null || leafSlices.length <= 1) ? HitsThresholdChecker.create(TOTAL_HITS_THRESHOLD) :
           HitsThresholdChecker.createShared(TOTAL_HITS_THRESHOLD);
 
+      private final MaxScoreAccumulator minScoreAcc = (executor == null || leafSlices.length <= 1) ? null : new MaxScoreAccumulator();
+
       @Override
       public TopFieldCollector newCollector() throws IOException {
         // TODO: don't pay the price for accurate hit counts by default
-        return TopFieldCollector.create(rewrittenSort, cappedNumHits, after, hitsThresholdChecker);
+        return TopFieldCollector.create(rewrittenSort, cappedNumHits, after, hitsThresholdChecker, minScoreAcc);
       }
 
       @Override
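Note that the searcher only allocates the shared MaxScoreAccumulator when the search can actually run concurrently (an executor is set and there is more than one leaf slice); otherwise minScoreAcc stays null and the collectors skip the global checks. A hedged sketch of a search that exercises the shared accumulator (pool size and query are illustrative):

    ExecutorService pool = Executors.newFixedThreadPool(4);
    IndexSearcher searcher = new IndexSearcher(reader, pool); // reader: an open IndexReader
    TopDocs hits = searcher.search(new TermQuery(new Term("body", "foo")), 10);
    pool.shutdown();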
diff --git a/lucene/core/src/java/org/apache/lucene/search/MaxScoreAccumulator.java b/lucene/core/src/java/org/apache/lucene/search/MaxScoreAccumulator.java
new file mode 100644
index 0000000..5265c89
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/search/MaxScoreAccumulator.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search;
+
+import java.util.concurrent.atomic.LongAccumulator;
+
+/**
+ * Maintains the maximum score and its corresponding document id concurrently
+ */
+final class MaxScoreAccumulator {
+  // we use 2^10-1 to check the remainder with a bitwise operation
+  static final int DEFAULT_INTERVAL = 0x3ff;
+
+  // scores are always positive
+  final LongAccumulator acc = new LongAccumulator(Long::max, Long.MIN_VALUE);
+
+  // non-final and visible for tests
+  long modInterval;
+
+  MaxScoreAccumulator() {
+    this.modInterval = DEFAULT_INTERVAL;
+  }
+
+  void accumulate(int docID, float score) {
+    assert docID >= 0 && score >= 0;
+    long encode = (((long) Float.floatToIntBits(score)) << 32) | docID;
+    acc.accumulate(encode);
+  }
+
+  DocAndScore get() {
+    long value = acc.get();
+    if (value == Long.MIN_VALUE) {
+      return null;
+    }
+    float score = Float.intBitsToFloat((int) (value >> 32));
+    int docID = (int) value;
+    return new DocAndScore(docID, score);
+  }
+
+  static class DocAndScore implements Comparable<DocAndScore> {
+    final int docID;
+    final float score;
+
+    DocAndScore(int docID, float score) {
+      this.docID = docID;
+      this.score = score;
+    }
+
+    @Override
+    public int compareTo(DocAndScore o) {
+      int cmp = Float.compare(score, o.score);
+      if (cmp == 0) {
+        return Integer.compare(docID, o.docID);
+      }
+      return cmp;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+      if (this == o) return true;
+      if (o == null || getClass() != o.getClass()) return false;
+      DocAndScore result = (DocAndScore) o;
+      return docID == result.docID &&
+          Float.compare(result.score, score) == 0;
+    }
+
+    @Override
+    public String toString() {
+      return "DocAndScore{" +
+          "docID=" + docID +
+          ", score=" + score +
+          '}';
+    }
+  }
+}
\ No newline at end of file
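MaxScoreAccumulator packs each (score, docID) pair into one long so a lock-free LongAccumulator(Long::max) can retain the best pair: scores are asserted non-negative, and for non-negative floats the IEEE-754 bit pattern is monotonic, so the high 32 bits order by score and equal scores fall through to the larger doc id in the low 32 bits. A worked sketch of the round trip (values illustrative):

    float score = 3.5f;
    int docID = 42;
    long encoded = (((long) Float.floatToIntBits(score)) << 32) | docID;
    assert Float.intBitsToFloat((int) (encoded >> 32)) == score; // 3.5f
    assert (int) encoded == docID;                               // 42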
diff --git a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
index bf1c929..c638bf9 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
@@ -27,6 +27,7 @@ import java.util.Objects;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.search.FieldValueHitQueue.Entry;
+import org.apache.lucene.search.MaxScoreAccumulator.DocAndScore;
 import org.apache.lucene.search.TotalHits.Relation;
 
 /**
@@ -101,8 +102,9 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
     final FieldValueHitQueue<Entry> queue;
 
     public SimpleFieldCollector(Sort sort, FieldValueHitQueue<Entry> queue, int numHits,
-                                HitsThresholdChecker hitsThresholdChecker) {
-      super(queue, numHits, hitsThresholdChecker, sort.needsScores());
+                                HitsThresholdChecker hitsThresholdChecker,
+                                MaxScoreAccumulator minScoreAcc) {
+      super(queue, numHits, hitsThresholdChecker, sort.needsScores(), minScoreAcc);
       this.sort = sort;
       this.queue = queue;
     }
@@ -123,13 +125,22 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
         @Override
         public void setScorer(Scorable scorer) throws IOException {
           super.setScorer(scorer);
+          minCompetitiveScore = 0f;
           updateMinCompetitiveScore(scorer);
+          if (minScoreAcc != null) {
+            updateGlobalMinCompetitiveScore(scorer);
+          }
         }
 
         @Override
         public void collect(int doc) throws IOException {
           ++totalHits;
           hitsThresholdChecker.incrementHitCount();
+
+          if (minScoreAcc != null && (totalHits & minScoreAcc.modInterval) == 0) {
+            updateGlobalMinCompetitiveScore(scorer);
+          }
+
           if (queueFull) {
             if (collectedAllCompetitiveHits || reverseMul * comparator.compareBottom(doc) <= 0) {
               // since docs are visited in doc Id order, if compare is 0, it means
@@ -143,8 +154,8 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
                   collectedAllCompetitiveHits = true;
                 }
               } else if (totalHitsRelation == Relation.EQUAL_TO) {
-                // we just reached totalHitsThreshold, we can start setting the min
-                // competitive score now
+                // we can start setting the min competitive score if the
+                // threshold is reached for the first time here.
                 updateMinCompetitiveScore(scorer);
               }
               return;
@@ -185,8 +196,8 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
     final FieldDoc after;
 
     public PagingFieldCollector(Sort sort, FieldValueHitQueue<Entry> queue, FieldDoc after, int numHits,
-                                HitsThresholdChecker hitsThresholdChecker) {
-      super(queue, numHits, hitsThresholdChecker, sort.needsScores());
+                                HitsThresholdChecker hitsThresholdChecker, MaxScoreAccumulator minScoreAcc) {
+      super(queue, numHits, hitsThresholdChecker, sort.needsScores(), minScoreAcc);
       this.sort = sort;
       this.queue = queue;
       this.after = after;
@@ -213,7 +224,11 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
         @Override
         public void setScorer(Scorable scorer) throws IOException {
           super.setScorer(scorer);
+          minCompetitiveScore = 0f;
           updateMinCompetitiveScore(scorer);
+          if (minScoreAcc != null) {
+            updateGlobalMinCompetitiveScore(scorer);
+          }
         }
 
         @Override
@@ -223,6 +238,10 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
           totalHits++;
           hitsThresholdChecker.incrementHitCount();
 
+          if (minScoreAcc != null && (totalHits & minScoreAcc.modInterval) == 0) {
+            updateGlobalMinCompetitiveScore(scorer);
+          }
+
           if (queueFull) {
             // Fastmatch: return if this hit is no better than
             // the worst hit currently in the queue:
@@ -237,8 +256,10 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
                 } else {
                   collectedAllCompetitiveHits = true;
                 }
-              } else if (totalHitsRelation == Relation.GREATER_THAN_OR_EQUAL_TO) {
-                  updateMinCompetitiveScore(scorer);
+              } else if (totalHitsRelation == TotalHits.Relation.EQUAL_TO) {
+                // we can start setting the min competitive score if the
+                // threshold is reached for the first time here.
+                updateMinCompetitiveScore(scorer);
               }
               return;
             }
@@ -247,6 +268,11 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
           final int topCmp = reverseMul * comparator.compareTop(doc);
           if (topCmp > 0 || (topCmp == 0 && doc <= afterDoc)) {
             // Already collected on a previous page
+            if (totalHitsRelation == TotalHits.Relation.EQUAL_TO) {
+              // we just reached totalHitsThreshold, we can start setting the min
+              // competitive score now
+              updateMinCompetitiveScore(scorer);
+            }
             return;
           }
 
@@ -286,6 +312,12 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
   final HitsThresholdChecker hitsThresholdChecker;
   final FieldComparator.RelevanceComparator firstComparator;
   final boolean canSetMinScore;
+
+  // an accumulator that maintains the maximum of the segment's minimum competitive scores
+  final MaxScoreAccumulator minScoreAcc;
+  // the current local minimum competitive score already propagated to the underlying scorer
+  float minCompetitiveScore;
+
   final int numComparators;
   FieldValueHitQueue.Entry bottom = null;
   boolean queueFull;
@@ -299,7 +331,8 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
   // visibility, then anyone will be able to extend the class, which is not what
   // we want.
   private TopFieldCollector(FieldValueHitQueue<Entry> pq, int numHits,
-                            HitsThresholdChecker hitsThresholdChecker, boolean needsScores) {
+                            HitsThresholdChecker hitsThresholdChecker, boolean needsScores,
+                            MaxScoreAccumulator minScoreAcc) {
     super(pq);
     this.needsScores = needsScores;
     this.numHits = numHits;
@@ -318,6 +351,7 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
       scoreMode = needsScores ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES;
       canSetMinScore = false;
     }
+    this.minScoreAcc = minScoreAcc;
   }
 
   @Override
@@ -325,12 +359,36 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
     return scoreMode;
   }
 
+  protected void updateGlobalMinCompetitiveScore(Scorable scorer) throws IOException {
+    assert minScoreAcc != null;
+    if (canSetMinScore
+          && hitsThresholdChecker.isThresholdReached()) {
+      // we can start checking the global maximum score even
+      // if the local queue is not full because the threshold
+      // is reached.
+      DocAndScore maxMinScore = minScoreAcc.get();
+      if (maxMinScore != null && maxMinScore.score > minCompetitiveScore) {
+        scorer.setMinCompetitiveScore(maxMinScore.score);
+        minCompetitiveScore = maxMinScore.score;
+        totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
+      }
+    }
+  }
+
   protected void updateMinCompetitiveScore(Scorable scorer) throws IOException {
-    if (canSetMinScore && hitsThresholdChecker.isThresholdReached() && queueFull) {
+    if (canSetMinScore
+          && queueFull
+          && hitsThresholdChecker.isThresholdReached()) {
       assert bottom != null && firstComparator != null;
       float minScore = firstComparator.value(bottom.slot);
-      scorer.setMinCompetitiveScore(minScore);
-      totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
+      if (minScore > minCompetitiveScore) {
+        scorer.setMinCompetitiveScore(minScore);
+        minCompetitiveScore = minScore;
+        totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
+        if (minScoreAcc != null) {
+          minScoreAcc.accumulate(bottom.doc, minScore);
+        }
+      }
     }
   }
 
@@ -389,14 +447,14 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
       throw new IllegalArgumentException("totalHitsThreshold must be >= 0, got " + totalHitsThreshold);
     }
 
-    return create(sort, numHits, after, HitsThresholdChecker.create(totalHitsThreshold));
+    return create(sort, numHits, after, HitsThresholdChecker.create(totalHitsThreshold), null /* minScoreAcc */);
   }
 
   /**
-   * Same as above with an additional parameter to allow passing in the threshold checker
+   * Same as above with additional parameters to allow passing in the threshold checker and the max score accumulator.
    */
   static TopFieldCollector create(Sort sort, int numHits, FieldDoc after,
-                                         HitsThresholdChecker hitsThresholdChecker) {
+                                         HitsThresholdChecker hitsThresholdChecker, MaxScoreAccumulator minScoreAcc) {
 
     if (sort.fields.length == 0) {
       throw new IllegalArgumentException("Sort must contain at least one field");
@@ -413,7 +471,7 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
     FieldValueHitQueue<Entry> queue = FieldValueHitQueue.create(sort.fields, numHits);
 
     if (after == null) {
-      return new SimpleFieldCollector(sort, queue, numHits, hitsThresholdChecker);
+      return new SimpleFieldCollector(sort, queue, numHits, hitsThresholdChecker, minScoreAcc);
     } else {
       if (after.fields == null) {
         throw new IllegalArgumentException("after.fields wasn't set; you must pass fillFields=true for the previous search");
@@ -423,22 +481,25 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
         throw new IllegalArgumentException("after.fields has " + after.fields.length + " values but sort has " + sort.getSort().length);
       }
 
-      return new PagingFieldCollector(sort, queue, after, numHits, hitsThresholdChecker);
+      return new PagingFieldCollector(sort, queue, after, numHits, hitsThresholdChecker, minScoreAcc);
     }
   }
 
   /**
    * Create a CollectorManager which uses a shared hit counter to maintain number of hits
+   * and a shared {@link MaxScoreAccumulator} to propagate the minimum score across segments if
+   * the primary sort is by relevance.
    */
   public static CollectorManager<TopFieldCollector, TopFieldDocs> createSharedManager(Sort sort, int numHits, FieldDoc after,
                                                                                  int totalHitsThreshold) {
     return new CollectorManager<>() {
 
       private final HitsThresholdChecker hitsThresholdChecker = HitsThresholdChecker.createShared(totalHitsThreshold);
+      private final MaxScoreAccumulator minScoreAcc = new MaxScoreAccumulator();
 
       @Override
       public TopFieldCollector newCollector() throws IOException {
-        return create(sort, numHits, after, hitsThresholdChecker);
+        return create(sort, numHits, after, hitsThresholdChecker, minScoreAcc);
       }
 
       @Override
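When the primary sort is by score, createSharedManager now wires one MaxScoreAccumulator into every per-slice collector so slices can raise each other's minimum competitive score. A hedged usage sketch of the public entry point (query and sizes illustrative):

    Sort sort = new Sort(SortField.FIELD_SCORE, SortField.FIELD_DOC);
    CollectorManager<TopFieldCollector, TopFieldDocs> manager =
        TopFieldCollector.createSharedManager(sort, 10, null, 1000);
    TopFieldDocs hits = searcher.search(query, manager);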
diff --git a/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java b/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java
index 10c4fc4..2c77107 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.util.Collection;
 
 import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.MaxScoreAccumulator.DocAndScore;
 
 /**
  * A {@link Collector} implementation that collects the top-scoring hits,
@@ -50,19 +51,24 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
   private static class SimpleTopScoreDocCollector extends TopScoreDocCollector {
 
     SimpleTopScoreDocCollector(int numHits, HitsThresholdChecker hitsThresholdChecker,
-                               BottomValueChecker bottomValueChecker) {
-      super(numHits, hitsThresholdChecker, bottomValueChecker);
+                               MaxScoreAccumulator minScoreAcc) {
+      super(numHits, hitsThresholdChecker, minScoreAcc);
     }
 
     @Override
     public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
-      final int docBase = context.docBase;
+      // remember the doc base of this leaf; the global min-score check uses it to tie-break
+      docBase = context.docBase;
       return new ScorerLeafCollector() {
 
         @Override
         public void setScorer(Scorable scorer) throws IOException {
           super.setScorer(scorer);
+          minCompetitiveScore = 0f;
           updateMinCompetitiveScore(scorer);
+          if (minScoreAcc != null) {
+            updateGlobalMinCompetitiveScore(scorer);
+          }
         }
 
         @Override
@@ -75,8 +81,12 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
           totalHits++;
           hitsThresholdChecker.incrementHitCount();
 
+          if (minScoreAcc != null && (totalHits & minScoreAcc.modInterval) == 0) {
+            updateGlobalMinCompetitiveScore(scorer);
+          }
+
           if (score <= pqTop.score) {
-            if (totalHitsRelation == TotalHits.Relation.EQUAL_TO && hitsThresholdChecker.isThresholdReached()) {
+            if (totalHitsRelation == TotalHits.Relation.EQUAL_TO) {
               // we just reached totalHitsThreshold, we can start setting the min
               // competitive score now
               updateMinCompetitiveScore(scorer);
@@ -102,8 +112,8 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
     private int collectedHits;
 
     PagingTopScoreDocCollector(int numHits, ScoreDoc after, HitsThresholdChecker hitsThresholdChecker,
-                               BottomValueChecker bottomValueChecker) {
-      super(numHits, hitsThresholdChecker, bottomValueChecker);
+                               MaxScoreAccumulator minScoreAcc) {
+      super(numHits, hitsThresholdChecker, minScoreAcc);
       this.after = after;
       this.collectedHits = 0;
     }
@@ -123,7 +133,7 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
 
     @Override
     public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
-      final int docBase = context.docBase;
+      docBase = context.docBase;
       final int afterDoc = after.doc - context.docBase;
 
       return new ScorerLeafCollector() {
@@ -137,9 +147,13 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
           totalHits++;
           hitsThresholdChecker.incrementHitCount();
 
+          if (minScoreAcc != null && (totalHits & minScoreAcc.modInterval) == 0) {
+            updateGlobalMinCompetitiveScore(scorer);
+          }
+
           if (score > after.score || (score == after.score && doc <= afterDoc)) {
             // hit was collected on a previous page
-            if (totalHitsRelation == TotalHits.Relation.EQUAL_TO && hitsThresholdChecker.isThresholdReached()) {
+            if (totalHitsRelation == TotalHits.Relation.EQUAL_TO) {
               // we just reached totalHitsThreshold, we can start setting the min
               // competitive score now
               updateMinCompetitiveScore(scorer);
@@ -148,6 +162,12 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
           }
 
           if (score <= pqTop.score) {
+            if (totalHitsRelation == TotalHits.Relation.EQUAL_TO) {
+              // we just reached totalHitsThreshold, we can start setting the min
+              // competitive score now
+              updateMinCompetitiveScore(scorer);
+            }
+
             // Since docs are returned in-order (i.e., increasing doc Id), a document
             // with equal score to pqTop.score cannot compete since HitQueue favors
             // documents with lower doc Ids. Therefore reject those docs too.
@@ -201,7 +221,7 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
   }
 
   static TopScoreDocCollector create(int numHits, ScoreDoc after, HitsThresholdChecker hitsThresholdChecker,
-                                     BottomValueChecker bottomValueChecker) {
+                                     MaxScoreAccumulator minScoreAcc) {
 
     if (numHits <= 0) {
       throw new IllegalArgumentException("numHits must be > 0; please use TotalHitCountCollector if you just need the total hit count");
@@ -212,25 +232,26 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
     }
 
     if (after == null) {
-      return new SimpleTopScoreDocCollector(numHits, hitsThresholdChecker, bottomValueChecker);
+      return new SimpleTopScoreDocCollector(numHits, hitsThresholdChecker, minScoreAcc);
     } else {
-      return new PagingTopScoreDocCollector(numHits, after, hitsThresholdChecker, bottomValueChecker);
+      return new PagingTopScoreDocCollector(numHits, after, hitsThresholdChecker, minScoreAcc);
     }
   }
 
   /**
    * Create a CollectorManager which uses a shared hit counter to maintain number of hits
+   * and a shared {@link MaxScoreAccumulator} to propagate the minimum score across segments
    */
   public static CollectorManager<TopScoreDocCollector, TopDocs> createSharedManager(int numHits, FieldDoc after,
                                                                                       int totalHitsThreshold) {
     return new CollectorManager<>() {
 
       private final HitsThresholdChecker hitsThresholdChecker = HitsThresholdChecker.createShared(totalHitsThreshold);
-      private final BottomValueChecker bottomValueChecker = BottomValueChecker.createMaxBottomScoreChecker();
+      private final MaxScoreAccumulator minScoreAcc = new MaxScoreAccumulator();
 
       @Override
       public TopScoreDocCollector newCollector() throws IOException {
-        return TopScoreDocCollector.create(numHits, after, hitsThresholdChecker, bottomValueChecker);
+        return TopScoreDocCollector.create(numHits, after, hitsThresholdChecker, minScoreAcc);
       }
 
       @Override
@@ -246,13 +267,15 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
     };
   }
 
+  int docBase;
   ScoreDoc pqTop;
   final HitsThresholdChecker hitsThresholdChecker;
-  final BottomValueChecker bottomValueChecker;
+  final MaxScoreAccumulator minScoreAcc;
+  float minCompetitiveScore;
 
   // prevents instantiation
   TopScoreDocCollector(int numHits, HitsThresholdChecker hitsThresholdChecker,
-                       BottomValueChecker bottomValueChecker) {
+                       MaxScoreAccumulator minScoreAcc) {
     super(new HitQueue(numHits, true));
     assert hitsThresholdChecker != null;
 
@@ -260,7 +283,7 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
     // that at this point top() is already initialized.
     pqTop = pq.top();
     this.hitsThresholdChecker = hitsThresholdChecker;
-    this.bottomValueChecker = bottomValueChecker;
+    this.minScoreAcc = minScoreAcc;
   }
 
   @Override
@@ -277,31 +300,41 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
     return hitsThresholdChecker.scoreMode();
   }
 
+  protected void updateGlobalMinCompetitiveScore(Scorable scorer) throws IOException {
+    assert minScoreAcc != null;
+    DocAndScore maxMinScore = minScoreAcc.get();
+    if (maxMinScore != null) {
+      // since we tie-break on doc id and collect in doc id order we can require
+      // the next float if the global minimum score is set on a document id that is
+      // smaller than the ids in the current leaf
+      float score = docBase > maxMinScore.docID ? Math.nextUp(maxMinScore.score) : maxMinScore.score;
+      if (score > minCompetitiveScore) {
+        assert hitsThresholdChecker.isThresholdReached();
+        scorer.setMinCompetitiveScore(score);
+        minCompetitiveScore = score;
+        totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
+      }
+    }
+  }
+
   protected void updateMinCompetitiveScore(Scorable scorer) throws IOException {
     if (hitsThresholdChecker.isThresholdReached()
-          && ((bottomValueChecker != null && bottomValueChecker.getBottomValue() > 0)
-          || (pqTop != null && pqTop.score != Float.NEGATIVE_INFINITY))) { // -Infinity is the score of sentinels
+          && pqTop != null
+          && pqTop.score != Float.NEGATIVE_INFINITY) { // -Infinity is the score of sentinels
       // since we tie-break on doc id and collect in doc id order, we can require
       // the next float
-      float bottomScore = Float.NEGATIVE_INFINITY;
-
-      if (pqTop != null && pqTop.score != Float.NEGATIVE_INFINITY) {
-        bottomScore = Math.nextUp(pqTop.score);
-
-        if (bottomValueChecker != null) {
-          bottomValueChecker.updateThreadLocalBottomValue(pqTop.score);
+      float localMinScore = Math.nextUp(pqTop.score);
+      if (localMinScore > minCompetitiveScore) {
+        scorer.setMinCompetitiveScore(localMinScore);
+        totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
+        minCompetitiveScore = localMinScore;
+        if (minScoreAcc != null) {
+          // we register the raw score (not the next float up) together
+          // with the document id so that leaves whose doc base lies
+          // beyond that id can choose to require the next float up
+          minScoreAcc.accumulate(pqTop.doc, pqTop.score);
         }
       }
-
-      // Global bottom can only be greater than or equal to the local bottom score
-      // The updating of global bottom score for this hit before getting here should
-      // ensure that
-      if (bottomValueChecker != null && bottomValueChecker.getBottomValue() > bottomScore) {
-        bottomScore = bottomValueChecker.getBottomValue();
-      }
-
-      scorer.setMinCompetitiveScore(bottomScore);
-      totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
     }
   }
 }
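The docBase comparison in updateGlobalMinCompetitiveScore encodes the doc-id tie-break: hits are collected in doc-id order and HitQueue prefers lower ids, so a leaf that starts past the doc id recorded with the global maximum can never win a tie at that score and may demand the next float up. A hedged restatement of that rule (illustrative helper, not committed API):

    static float requiredMinScore(int leafDocBase, int globalDocID, float globalScore) {
      // equal scores stay competitive only for leaves that can still
      // win the doc-id tie-break
      return leafDocBase > globalDocID ? Math.nextUp(globalScore) : globalScore;
    }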
diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java
index 14ea758..ff7bf3b 100644
--- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java
+++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDReader.java
@@ -17,6 +17,7 @@
 package org.apache.lucene.util.bkd;
 
 import java.io.IOException;
+import java.io.UncheckedIOException;
 import java.util.Arrays;
 
 import org.apache.lucene.codecs.CodecUtil;
@@ -24,6 +25,8 @@ import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.PointValues;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.store.ByteArrayDataInput;
+import org.apache.lucene.store.ByteBufferIndexInput;
+import org.apache.lucene.store.DataInput;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.BytesRef;
@@ -34,6 +37,124 @@ import org.apache.lucene.util.MathUtil;
  * @lucene.experimental */
 
 public final class BKDReader extends PointValues implements Accountable {
+
+  private static abstract class BKDInput extends DataInput implements Cloneable {
+    abstract long getMinLeafBlockFP();
+    abstract long ramBytesUsed();
+
+    abstract int getPosition();
+    abstract void setPosition(int pos) throws IOException;
+
+    @Override
+    public BKDInput clone() {
+      return (BKDInput)super.clone();
+    }
+  }
+
+  private static class BKDOffHeapInput extends BKDInput implements Cloneable {
+
+    private final IndexInput packedIndex;
+    private final long minLeafBlockFP;
+
+    BKDOffHeapInput(IndexInput packedIndex) throws IOException {
+      this.packedIndex = packedIndex;
+      this.minLeafBlockFP = packedIndex.clone().readVLong();
+    }
+
+    private BKDOffHeapInput(IndexInput packedIndex, long minLeafBlockFP) {
+      this.packedIndex = packedIndex;
+      this.minLeafBlockFP = minLeafBlockFP;
+    }
+
+    @Override
+    public BKDOffHeapInput clone() {
+      return new BKDOffHeapInput(packedIndex.clone(), minLeafBlockFP);
+    }
+
+    @Override
+    long getMinLeafBlockFP() {
+      return minLeafBlockFP;
+    }
+
+    @Override
+    long ramBytesUsed() {
+      return 0;
+    }
+
+    @Override
+    int getPosition() {
+      return (int)packedIndex.getFilePointer();
+    }
+
+    @Override
+    void setPosition(int pos) throws IOException {
+      packedIndex.seek(pos);
+    }
+
+    @Override
+    public byte readByte() throws IOException {
+      return packedIndex.readByte();
+    }
+
+    @Override
+    public void readBytes(byte[] b, int offset, int len) throws IOException {
+      packedIndex.readBytes(b, offset, len);
+    }
+  }
+
+  private static class BKDOnHeapInput extends BKDInput implements Cloneable {
+
+    private final ByteArrayDataInput packedIndex;
+    private final long minLeafBlockFP;
+
+    BKDOnHeapInput(IndexInput packedIndex, int numBytes) throws IOException {
+      byte[] packedBytes = new byte[numBytes];
+      packedIndex.readBytes(packedBytes, 0, numBytes);
+      this.packedIndex = new ByteArrayDataInput(packedBytes);
+      this.minLeafBlockFP = this.packedIndex.clone().readVLong();
+    }
+
+    private BKDOnHeapInput(ByteArrayDataInput packedIndex, long minLeafBlockFP) {
+      this.packedIndex = packedIndex;
+      this.minLeafBlockFP = minLeafBlockFP;
+    }
+
+    @Override
+    public BKDOnHeapInput clone() {
+      return new BKDOnHeapInput((ByteArrayDataInput)packedIndex.clone(), minLeafBlockFP);
+    }
+
+    @Override
+    long getMinLeafBlockFP() {
+      return minLeafBlockFP;
+    }
+
+    @Override
+    long ramBytesUsed() {
+      return packedIndex.length();
+    }
+
+    @Override
+    int getPosition() {
+      return packedIndex.getPosition();
+    }
+
+    @Override
+    void setPosition(int pos) {
+      packedIndex.setPosition(pos);
+    }
+
+    @Override
+    public byte readByte() throws IOException {
+      return packedIndex.readByte();
+    }
+
+    @Override
+    public void readBytes(byte[] b, int offset, int len) throws IOException {
+      packedIndex.readBytes(b, offset, len);
+    }
+  }
+
   // Packed array of byte[] holding all split values in the full binary tree:
   final int leafNodeOffset;
   final int numDataDims;
@@ -50,10 +171,18 @@ public final class BKDReader extends PointValues implements Accountable {
   protected final int packedBytesLength;
   protected final int packedIndexBytesLength;
 
-  final byte[] packedIndex;
+  final BKDInput packedIndex;
 
   /** Caller must pre-seek the provided {@link IndexInput} to the index location that {@link BKDWriter#finish} returned */
   public BKDReader(IndexInput in) throws IOException {
+    this(in, in instanceof ByteBufferIndexInput);
+  }
+
+  /**
+   * Caller must pre-seek the provided {@link IndexInput} to the index location that {@link BKDWriter#finish} returned
+   * and specify {@code true} to store BKD off-heap ({@code false} otherwise)
+   */
+  public BKDReader(IndexInput in, boolean offHeap) throws IOException {
     version = CodecUtil.checkHeader(in, BKDWriter.CODEC_NAME, BKDWriter.VERSION_START, BKDWriter.VERSION_CURRENT);
     numDataDims = in.readVInt();
     if (version >= BKDWriter.VERSION_SELECTIVE_INDEXING) {
@@ -87,14 +216,18 @@ public final class BKDReader extends PointValues implements Accountable {
     docCount = in.readVInt();
 
     int numBytes = in.readVInt();
-    packedIndex = new byte[numBytes];
-    in.readBytes(packedIndex, 0, numBytes);
+    IndexInput slice = in.slice("packedIndex", in.getFilePointer(), numBytes);
+    if (offHeap) {
+      packedIndex = new BKDOffHeapInput(slice);
+    } else {
+      packedIndex = new BKDOnHeapInput(slice, numBytes);
+    }
 
     this.in = in;
   }
 
   long getMinLeafBlockFP() {
-    return new ByteArrayDataInput(packedIndex).readVLong();
+    return packedIndex.getMinLeafBlockFP();
   }
 
   /** Used to walk the in-heap index. The format takes advantage of the limited
@@ -108,7 +241,7 @@ public final class BKDReader extends PointValues implements Accountable {
     private int splitDim;
     private final byte[][] splitPackedValueStack;
     // used to read the packed byte[]
-    private final ByteArrayDataInput in;
+    private final BKDInput in;
     // holds the minimum (left most) leaf block file pointer for each level we've recursed to:
     private final long[] leafBlockFPStack;
     // holds the address, in the packed byte[] index, of the left-node of each level:
@@ -139,7 +272,7 @@ public final class BKDReader extends PointValues implements Accountable {
       splitDims = new int[treeDepth+1];
       negativeDeltas = new boolean[numIndexDims*(treeDepth+1)];
 
-      in = new ByteArrayDataInput(packedIndex);
+      in = packedIndex.clone();
       splitValuesStack[0] = new byte[packedIndexBytesLength];
       readNodeData(false);
       scratch = new BytesRef();
@@ -156,7 +289,11 @@ public final class BKDReader extends PointValues implements Accountable {
       System.arraycopy(negativeDeltas, (level-1)*numIndexDims, negativeDeltas, level*numIndexDims, numIndexDims);
       assert splitDim != -1;
       negativeDeltas[level*numIndexDims+splitDim] = true;
-      in.setPosition(nodePosition);
+      try {
+        in.setPosition(nodePosition);
+      } catch (IOException e) {
+        throw new UncheckedIOException(e);
+      }
       readNodeData(true);
     }
     
@@ -186,7 +323,11 @@ public final class BKDReader extends PointValues implements Accountable {
       System.arraycopy(negativeDeltas, (level-1)*numIndexDims, negativeDeltas, level*numIndexDims, numIndexDims);
       assert splitDim != -1;
       negativeDeltas[level*numIndexDims+splitDim] = false;
-      in.setPosition(nodePosition);
+      try {
+        in.setPosition(nodePosition);
+      } catch (IOException e) {
+        throw new UncheckedIOException(e);
+      }
       readNodeData(false);
     }
 
@@ -271,51 +412,54 @@ public final class BKDReader extends PointValues implements Accountable {
     }
 
     private void readNodeData(boolean isLeft) {
+      try {
+        leafBlockFPStack[level] = leafBlockFPStack[level - 1];
 
-      leafBlockFPStack[level] = leafBlockFPStack[level-1];
+        // read leaf block FP delta
+        if (isLeft == false) {
+          leafBlockFPStack[level] += in.readVLong();
+        }
 
-      // read leaf block FP delta
-      if (isLeft == false) {
-        leafBlockFPStack[level] += in.readVLong();
-      }
+        if (isLeafNode()) {
+          splitDim = -1;
+        } else {
 
-      if (isLeafNode()) {
-        splitDim = -1;
-      } else {
+          // read split dim, prefix, firstDiffByteDelta encoded as int:
+          int code = in.readVInt();
+          splitDim = code % numIndexDims;
+          splitDims[level] = splitDim;
+          code /= numIndexDims;
+          int prefix = code % (1 + bytesPerDim);
+          int suffix = bytesPerDim - prefix;
 
-        // read split dim, prefix, firstDiffByteDelta encoded as int:
-        int code = in.readVInt();
-        splitDim = code % numIndexDims;
-        splitDims[level] = splitDim;
-        code /= numIndexDims;
-        int prefix = code % (1+bytesPerDim);
-        int suffix = bytesPerDim - prefix;
+          if (splitValuesStack[level] == null) {
+            splitValuesStack[level] = new byte[packedIndexBytesLength];
+          }
+          System.arraycopy(splitValuesStack[level - 1], 0, splitValuesStack[level], 0, packedIndexBytesLength);
+          if (suffix > 0) {
+            int firstDiffByteDelta = code / (1 + bytesPerDim);
+            if (negativeDeltas[level * numIndexDims + splitDim]) {
+              firstDiffByteDelta = -firstDiffByteDelta;
+            }
+            int oldByte = splitValuesStack[level][splitDim * bytesPerDim + prefix] & 0xFF;
+            splitValuesStack[level][splitDim * bytesPerDim + prefix] = (byte) (oldByte + firstDiffByteDelta);
+            in.readBytes(splitValuesStack[level], splitDim * bytesPerDim + prefix + 1, suffix - 1);
+          } else {
+            // our split value is == last split value in this dim, which can happen when there are many duplicate values
+          }
 
-        if (splitValuesStack[level] == null) {
-          splitValuesStack[level] = new byte[packedIndexBytesLength];
-        }
-        System.arraycopy(splitValuesStack[level-1], 0, splitValuesStack[level], 0, packedIndexBytesLength);
-        if (suffix > 0) {
-          int firstDiffByteDelta = code / (1+bytesPerDim);
-          if (negativeDeltas[level*numIndexDims + splitDim]) {
-            firstDiffByteDelta = -firstDiffByteDelta;
+          int leftNumBytes;
+          if (nodeID * 2 < leafNodeOffset) {
+            leftNumBytes = in.readVInt();
+          } else {
+            leftNumBytes = 0;
           }
-          int oldByte = splitValuesStack[level][splitDim*bytesPerDim+prefix] & 0xFF;
-          splitValuesStack[level][splitDim*bytesPerDim+prefix] = (byte) (oldByte + firstDiffByteDelta);
-          in.readBytes(splitValuesStack[level], splitDim*bytesPerDim+prefix+1, suffix-1);
-        } else {
-          // our split value is == last split value in this dim, which can happen when there are many duplicate values
-        }
 
-        int leftNumBytes;
-        if (nodeID * 2 < leafNodeOffset) {
-          leftNumBytes = in.readVInt();
-        } else {
-          leftNumBytes = 0;
+          leftNodePositions[level] = in.getPosition();
+          rightNodePositions[level] = leftNodePositions[level] + leftNumBytes;
         }
-
-        leftNodePositions[level] = in.getPosition();
-        rightNodePositions[level] = leftNodePositions[level] + leftNumBytes;
+      } catch (IOException e) {
+        throw new UncheckedIOException(e);
       }
     }
   }
@@ -738,7 +882,7 @@ public final class BKDReader extends PointValues implements Accountable {
 
   @Override
   public long ramBytesUsed() {
-    return packedIndex.length;
+    return packedIndex.ramBytesUsed();
   }
 
   @Override
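With this change, BKDReader keeps the packed index off-heap whenever the underlying input is memory-mapped, so ramBytesUsed() reports 0 for it; the on-heap variant still copies the bytes and accounts for them. A hedged sketch of the new two-argument constructor (file name and seek position illustrative; indexStartFP stands for the value BKDWriter.finish returned):

    IndexInput in = dir.openInput("points.kdi", IOContext.READ);
    in.seek(indexStartFP);
    BKDReader bkd = new BKDReader(in, in instanceof ByteBufferIndexInput);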
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMaxScoreAccumulator.java b/lucene/core/src/test/org/apache/lucene/search/TestMaxScoreAccumulator.java
new file mode 100644
index 0000000..c656d47
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/search/TestMaxScoreAccumulator.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search;
+
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestMaxScoreAccumulator extends LuceneTestCase {
+  public void testSimple() {
+    MaxScoreAccumulator acc = new MaxScoreAccumulator();
+    acc.accumulate(0, 0f);
+    acc.accumulate(10, 0f);
+    assertEquals(0f, acc.get().score, 0);
+    assertEquals(10, acc.get().docID, 0);
+    acc.accumulate(100, 1000f);
+    assertEquals(1000f, acc.get().score, 0);
+    assertEquals(100, acc.get().docID, 0);
+    acc.accumulate(1000, 5f);
+    assertEquals(1000f, acc.get().score, 0);
+    assertEquals(100, acc.get().docID, 0);
+    acc.accumulate(99, 1000f);
+    assertEquals(1000f, acc.get().score, 0);
+    assertEquals(100, acc.get().docID, 0);
+    acc.accumulate(0, 1001f);
+    assertEquals(1001f, acc.get().score, 0);
+    assertEquals(0, acc.get().docID, 0);
+  }
+
+  public void testRandom() {
+    MaxScoreAccumulator acc = new MaxScoreAccumulator();
+    int numDocs = atLeast(100);
+    int maxDocs = atLeast(10000);
+    MaxScoreAccumulator.DocAndScore max = new MaxScoreAccumulator.DocAndScore(-1, -1);
+    for (int i = 0; i < numDocs; i++) {
+      MaxScoreAccumulator.DocAndScore res = new MaxScoreAccumulator.DocAndScore(random().nextInt(maxDocs), random().nextFloat());
+      acc.accumulate(res.docID, res.score);
+      if (res.compareTo(max) > 0) {
+        max = res;
+      }
+    }
+    assertEquals(max, acc.get());
+  }
+}
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestQueryVisitor.java b/lucene/core/src/test/org/apache/lucene/search/TestQueryVisitor.java
index f1a4310..27254d8 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestQueryVisitor.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestQueryVisitor.java
@@ -328,6 +328,24 @@ public class TestQueryVisitor extends LuceneTestCase {
     minimumTermSet.clear();
     extractor.collectTerms(minimumTermSet);
     assertThat(minimumTermSet, equalTo(expected2));
+
+    BooleanQuery bq = new BooleanQuery.Builder()
+        .add(new BooleanQuery.Builder()
+            .add(new TermQuery(new Term("f", "1")), BooleanClause.Occur.MUST)
+            .add(new TermQuery(new Term("f", "61")), BooleanClause.Occur.MUST)
+            .add(new TermQuery(new Term("f", "211")), BooleanClause.Occur.FILTER)
+            .add(new TermQuery(new Term("f", "5")), BooleanClause.Occur.SHOULD)
+            .build(), BooleanClause.Occur.SHOULD)
+        .add(new PhraseQuery("f", "3333", "44444"), BooleanClause.Occur.SHOULD)
+        .build();
+    QueryNode ex2 = new ConjunctionNode();
+    bq.visit(ex2);
+    Set<Term> expected3 = new HashSet<>(Arrays.asList(new Term("f", "1"), new Term("f", "3333")));
+    minimumTermSet.clear();
+    ex2.collectTerms(minimumTermSet);
+    assertThat(minimumTermSet, equalTo(expected3));
+    ex2.getWeight(); // force sort order
+    assertThat(ex2.toString(), equalTo("AND(AND(OR(AND(TERM(f:3333),TERM(f:44444)),AND(TERM(f:1),TERM(f:61),AND(TERM(f:211))))))"));
   }
 
 }
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java
index 130449b..5755bb6 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java
@@ -25,6 +25,8 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
@@ -112,29 +114,27 @@ public class TestTopDocsCollector extends LuceneTestCase {
     return tdc;
   }
 
-  private TopDocsCollector<ScoreDoc> doSearchWithThreshold(int numResults, int thresHold) throws IOException {
-    Query q = new MatchAllDocsQuery();
-    IndexSearcher searcher = newSearcher(reader);
+  private TopDocsCollector<ScoreDoc> doSearchWithThreshold(int numResults, int thresHold, Query q, IndexReader indexReader) throws IOException {
+    IndexSearcher searcher = new IndexSearcher(indexReader);
     TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.create(numResults, thresHold);
     searcher.search(q, tdc);
     return tdc;
   }
 
-  private TopDocs doConcurrentSearchWithThreshold(int numResults, int threshold, IndexReader reader) throws IOException {
-    Query q = new MatchAllDocsQuery();
+  private TopDocs doConcurrentSearchWithThreshold(int numResults, int threshold, Query q, IndexReader indexReader) throws IOException {
     ExecutorService service = new ThreadPoolExecutor(4, 4, 0L, TimeUnit.MILLISECONDS,
         new LinkedBlockingQueue<Runnable>(),
         new NamedThreadFactory("TestTopDocsCollector"));
-    IndexSearcher searcher = new IndexSearcher(reader, service);
-
-    CollectorManager collectorManager = TopScoreDocCollector.createSharedManager(numResults,
-        null, threshold);
+    try {
+      IndexSearcher searcher = new IndexSearcher(indexReader, service);
 
-    TopDocs tdc = (TopDocs) searcher.search(q, collectorManager);
+      CollectorManager collectorManager = TopScoreDocCollector.createSharedManager(numResults,
+          null, threshold);
 
-    service.shutdown();
-
-    return tdc;
+      return (TopDocs) searcher.search(q, collectorManager);
+    } finally {
+      service.shutdown();
+    }
   }
   
   @Override
@@ -344,8 +344,8 @@ public class TestTopDocsCollector extends LuceneTestCase {
     assertEquals(2, reader.leaves().size());
     w.close();
 
-    TopDocsCollector collector = doSearchWithThreshold(5, 10);
-    TopDocs tdc = doConcurrentSearchWithThreshold(5, 10, reader);
+    TopDocsCollector collector = doSearchWithThreshold(5, 10, q, reader);
+    TopDocs tdc = doConcurrentSearchWithThreshold(5, 10, q, reader);
     TopDocs tdc2 = collector.topDocs();
 
     CheckHits.checkEqual(q, tdc.scoreDocs, tdc2.scoreDocs);
@@ -402,7 +402,174 @@ public class TestTopDocsCollector extends LuceneTestCase {
     dir.close();
   }
 
-  public void testGlobalScore() throws Exception {
+  public void testConcurrentMinScore() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE));
+    Document doc = new Document();
+    w.addDocuments(Arrays.asList(doc, doc, doc, doc, doc));
+    w.flush();
+    w.addDocuments(Arrays.asList(doc, doc, doc, doc, doc, doc));
+    w.flush();
+    w.addDocuments(Arrays.asList(doc, doc));
+    w.flush();
+    IndexReader reader = DirectoryReader.open(w);
+    assertEquals(3, reader.leaves().size());
+    w.close();
+
+    CollectorManager<TopScoreDocCollector, TopDocs> manager =
+        TopScoreDocCollector.createSharedManager(2, null, 0);
+    TopScoreDocCollector collector = manager.newCollector();
+    TopScoreDocCollector collector2 = manager.newCollector();
+    assertTrue(collector.minScoreAcc == collector2.minScoreAcc);
+    MaxScoreAccumulator minValueChecker = collector.minScoreAcc;
+    // force the check of the global minimum score on every collected hit
+    minValueChecker.modInterval = 0;
+
+    ScoreAndDoc scorer = new ScoreAndDoc();
+    ScoreAndDoc scorer2 = new ScoreAndDoc();
+
+    LeafCollector leafCollector = collector.getLeafCollector(reader.leaves().get(0));
+    leafCollector.setScorer(scorer);
+    LeafCollector leafCollector2 = collector2.getLeafCollector(reader.leaves().get(1));
+    leafCollector2.setScorer(scorer2);
+
+    scorer.doc = 0;
+    scorer.score = 3;
+    leafCollector.collect(0);
+    assertNull(minValueChecker.get());
+    assertNull(scorer.minCompetitiveScore);
+
+    scorer2.doc = 0;
+    scorer2.score = 6;
+    leafCollector2.collect(0);
+    assertNull(minValueChecker.get());
+    assertNull(scorer2.minCompetitiveScore);
+
+    scorer.doc = 1;
+    scorer.score = 2;
+    leafCollector.collect(1);
+    assertEquals(2f, minValueChecker.get().score, 0f);
+    assertEquals(Math.nextUp(2f), scorer.minCompetitiveScore, 0f);
+    assertNull(scorer2.minCompetitiveScore);
+
+    scorer2.doc = 1;
+    scorer2.score = 9;
+    leafCollector2.collect(1);
+    assertEquals(6f, minValueChecker.get().score, 0f);
+    assertEquals(Math.nextUp(2f), scorer.minCompetitiveScore, 0f);
+    assertEquals(Math.nextUp(6f), scorer2.minCompetitiveScore, 0f);
+
+    scorer2.doc = 2;
+    scorer2.score = 7;
+    leafCollector2.collect(2);
+    assertEquals(minValueChecker.get().score, 7f, 0f);
+    assertEquals(Math.nextUp(2f), scorer.minCompetitiveScore, 0f);
+    assertEquals(Math.nextUp(7f), scorer2.minCompetitiveScore, 0f);
+
+    scorer2.doc = 3;
+    scorer2.score = 1;
+    leafCollector2.collect(3);
+    assertEquals(minValueChecker.get().score, 7f, 0f);
+    assertEquals(Math.nextUp(2f), scorer.minCompetitiveScore, 0f);
+    assertEquals(Math.nextUp(7f), scorer2.minCompetitiveScore, 0f);
+
+    scorer.doc = 2;
+    scorer.score = 10;
+    leafCollector.collect(2);
+    assertEquals(minValueChecker.get().score, 7f, 0f);
+    assertEquals(7f, scorer.minCompetitiveScore, 0f);
+    assertEquals(Math.nextUp(7f), scorer2.minCompetitiveScore, 0f);
+
+    scorer.doc = 3;
+    scorer.score = 11;
+    leafCollector.collect(3);
+    assertEquals(minValueChecker.get().score, 10, 0f);
+    assertEquals(Math.nextUp(10f), scorer.minCompetitiveScore, 0f);
+    assertEquals(Math.nextUp(7f), scorer2.minCompetitiveScore, 0f);
+
+    TopScoreDocCollector collector3 = manager.newCollector();
+    LeafCollector leafCollector3 = collector3.getLeafCollector(reader.leaves().get(2));
+    ScoreAndDoc scorer3 = new ScoreAndDoc();
+    leafCollector3.setScorer(scorer3);
+    assertEquals(Math.nextUp(10f), scorer3.minCompetitiveScore, 0f);
+
+    scorer3.doc = 0;
+    scorer3.score = 1f;
+    leafCollector3.collect(0);
+    assertEquals(10f, minValueChecker.get().score, 0f);
+    assertEquals(Math.nextUp(10f), scorer3.minCompetitiveScore, 0f);
+
+    scorer.doc = 4;
+    scorer.score = 11;
+    leafCollector.collect(4);
+    assertEquals(11f, minValueChecker.get().score, 0f);
+    assertEquals(Math.nextUp(11f), scorer.minCompetitiveScore, 0f);
+    assertEquals(Math.nextUp(7f), scorer2.minCompetitiveScore, 0f);
+    assertEquals(Math.nextUp(10f), scorer3.minCompetitiveScore, 0f);
+
+    scorer3.doc = 1;
+    scorer3.score = 2f;
+    leafCollector3.collect(1);
+    assertEquals(minValueChecker.get().score, 11f, 0f);
+    assertEquals(Math.nextUp(11f), scorer.minCompetitiveScore, 0f);
+    assertEquals(Math.nextUp(7f), scorer2.minCompetitiveScore, 0f);
+    assertEquals(Math.nextUp(11f), scorer3.minCompetitiveScore, 0f);
+
+
+    TopDocs topDocs = manager.reduce(Arrays.asList(collector, collector2, collector3));
+    assertEquals(11, topDocs.totalHits.value);
+    assertEquals(new TotalHits(11, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), topDocs.totalHits);
+
+    reader.close();
+    dir.close();
+  }
+
+  public void testRandomMinCompetitiveScore() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig());
+    int numDocs = atLeast(1000);
+    for (int i = 0; i < numDocs; ++i) {
+      int numAs = 1 + random().nextInt(5);
+      int numBs = random().nextFloat() < 0.5f ?  0 : 1 + random().nextInt(5);
+      int numCs = random().nextFloat() < 0.1f ?  0 : 1 + random().nextInt(5);
+      Document doc = new Document();
+      for (int j = 0; j < numAs; ++j) {
+        doc.add(new StringField("f", "A", Field.Store.NO));
+      }
+      for (int j = 0; j < numBs; ++j) {
+        doc.add(new StringField("f", "B", Field.Store.NO));
+      }
+      for (int j = 0; j < numCs; ++j) {
+        doc.add(new StringField("f", "C", Field.Store.NO));
+      }
+      w.addDocument(doc);
+    }
+    IndexReader indexReader = w.getReader();
+    w.close();
+    Query[] queries = new Query[]{
+        new TermQuery(new Term("f", "A")),
+        new TermQuery(new Term("f", "B")),
+        new TermQuery(new Term("f", "C")),
+        new BooleanQuery.Builder()
+            .add(new TermQuery(new Term("f", "A")), BooleanClause.Occur.MUST)
+            .add(new TermQuery(new Term("f", "B")), BooleanClause.Occur.SHOULD)
+            .build()
+    };
+    for (Query query : queries) {
+      TopDocsCollector collector = doSearchWithThreshold(5, 0, query, indexReader);
+      TopDocs tdc = doConcurrentSearchWithThreshold(5, 0, query, indexReader);
+      TopDocs tdc2 = collector.topDocs();
+
+      assertTrue(tdc.totalHits.value > 0);
+      assertTrue(tdc2.totalHits.value > 0);
+      CheckHits.checkEqual(query, tdc.scoreDocs, tdc2.scoreDocs);
+    }
+
+    indexReader.close();
+    dir.close();
+  }
+
+  public void testRealisticConcurrentMinimumScore() throws Exception {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
     try (LineFileDocs docs = new LineFileDocs(random())) {
@@ -432,8 +599,8 @@ public class TestTopDocsCollector extends LuceneTestCase {
         BytesRef term = BytesRef.deepCopyOf(termsEnum.term());
         Query query = new TermQuery(new Term("body", term));
 
-        TopDocsCollector collector = doSearchWithThreshold(5, 10);
-        TopDocs tdc = doConcurrentSearchWithThreshold(5, 10, reader);
+        TopDocsCollector collector = doSearchWithThreshold(5, 0, query, reader);
+        TopDocs tdc = doConcurrentSearchWithThreshold(5, 0, query, reader);
         TopDocs tdc2 = collector.topDocs();
 
         CheckHits.checkEqual(query, tdc.scoreDocs, tdc2.scoreDocs);
@@ -443,5 +610,4 @@ public class TestTopDocsCollector extends LuceneTestCase {
     reader.close();
     dir.close();
   }
-
 }
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java
index c8741ee..33e522c 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java
@@ -26,6 +26,7 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.StringField;
@@ -72,6 +73,31 @@ public class TestTopFieldCollector extends LuceneTestCase {
     dir.close();
     super.tearDown();
   }
+
+  private TopFieldCollector doSearchWithThreshold(int numResults, int threshold, Query q, Sort sort, IndexReader indexReader) throws IOException {
+    IndexSearcher searcher = new IndexSearcher(indexReader);
+    TopFieldCollector tdc = TopFieldCollector.create(sort, numResults, threshold);
+    searcher.search(q, tdc);
+    return tdc;
+  }
+
+  private TopDocs doConcurrentSearchWithThreshold(int numResults, int threshold, Query q, Sort sort, IndexReader indexReader) throws IOException {
+    ExecutorService service = new ThreadPoolExecutor(4, 4, 0L, TimeUnit.MILLISECONDS,
+        new LinkedBlockingQueue<Runnable>(),
+        new NamedThreadFactory("TestTopFieldCollector"));
+    try {
+      IndexSearcher searcher = new IndexSearcher(indexReader, service);
+
+      CollectorManager collectorManager = TopFieldCollector.createSharedManager(sort, numResults,
+          null, threshold);
+
+      TopDocs tdc = (TopDocs) searcher.search(q, collectorManager);
+
+      return tdc;
+    } finally {
+      service.shutdown();
+    }
+  }
   
   public void testSortWithoutFillFields() throws Exception {
     
@@ -495,4 +521,173 @@ public class TestTopFieldCollector extends LuceneTestCase {
     dir.close();
   }
 
+  public void testConcurrentMinScore() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE));
+    Document doc = new Document();
+    w.addDocuments(Arrays.asList(doc, doc, doc, doc, doc));
+    w.flush();
+    w.addDocuments(Arrays.asList(doc, doc, doc, doc, doc, doc));
+    w.flush();
+    w.addDocuments(Arrays.asList(doc, doc));
+    w.flush();
+    IndexReader reader = DirectoryReader.open(w);
+    assertEquals(3, reader.leaves().size());
+    w.close();
+
+    Sort sort = new Sort(SortField.FIELD_SCORE, SortField.FIELD_DOC);
+    CollectorManager<TopFieldCollector, TopFieldDocs> manager =
+        TopFieldCollector.createSharedManager(sort, 2, null, 0);
+    TopFieldCollector collector = manager.newCollector();
+    TopFieldCollector collector2 = manager.newCollector();
+    assertTrue(collector.minScoreAcc == collector2.minScoreAcc);
+    MaxScoreAccumulator minValueChecker = collector.minScoreAcc;
+    // force the check of the global minimum score on every round
+    minValueChecker.modInterval = 0;
+
+    ScoreAndDoc scorer = new ScoreAndDoc();
+    ScoreAndDoc scorer2 = new ScoreAndDoc();
+
+    LeafCollector leafCollector = collector.getLeafCollector(reader.leaves().get(0));
+    leafCollector.setScorer(scorer);
+    LeafCollector leafCollector2 = collector2.getLeafCollector(reader.leaves().get(1));
+    leafCollector2.setScorer(scorer2);
+
+    scorer.doc = 0;
+    scorer.score = 3;
+    leafCollector.collect(0);
+    assertNull(minValueChecker.get());
+    assertNull(scorer.minCompetitiveScore);
+
+    scorer2.doc = 0;
+    scorer2.score = 6;
+    leafCollector2.collect(0);
+    assertNull(minValueChecker.get());
+    assertNull(scorer2.minCompetitiveScore);
+
+    scorer.doc = 1;
+    scorer.score = 2;
+    leafCollector.collect(1);
+    assertEquals(2f, minValueChecker.get().score, 0f);
+    assertEquals(2f, scorer.minCompetitiveScore, 0f);
+    assertNull(scorer2.minCompetitiveScore);
+
+    scorer2.doc = 1;
+    scorer2.score = 9;
+    leafCollector2.collect(1);
+    assertEquals(6f, minValueChecker.get().score, 0f);
+    assertEquals(2f, scorer.minCompetitiveScore, 0f);
+    assertEquals(6f, scorer2.minCompetitiveScore, 0f);
+
+    scorer2.doc = 2;
+    scorer2.score = 7;
+    leafCollector2.collect(2);
+    assertEquals(7f, minValueChecker.get().score, 0f);
+    assertEquals(2f, scorer.minCompetitiveScore, 0f);
+    assertEquals(7f, scorer2.minCompetitiveScore, 0f);
+
+    scorer2.doc = 3;
+    scorer2.score = 1;
+    leafCollector2.collect(3);
+    assertEquals(7f, minValueChecker.get().score, 0f);
+    assertEquals(2f, scorer.minCompetitiveScore, 0f);
+    assertEquals(7f, scorer2.minCompetitiveScore, 0f);
+
+    scorer.doc = 2;
+    scorer.score = 10;
+    leafCollector.collect(2);
+    assertEquals(7f, minValueChecker.get().score, 0f);
+    assertEquals(7f, scorer.minCompetitiveScore, 0f);
+    assertEquals(7f, scorer2.minCompetitiveScore, 0f);
+
+    scorer.doc = 3;
+    scorer.score = 11;
+    leafCollector.collect(3);
+    assertEquals(10f, minValueChecker.get().score, 0f);
+    assertEquals(10f, scorer.minCompetitiveScore, 0f);
+    assertEquals(7f, scorer2.minCompetitiveScore, 0f);
+
+    TopFieldCollector collector3 = manager.newCollector();
+    LeafCollector leafCollector3 = collector3.getLeafCollector(reader.leaves().get(2));
+    ScoreAndDoc scorer3 = new ScoreAndDoc();
+    leafCollector3.setScorer(scorer3);
+    assertEquals(10f, scorer3.minCompetitiveScore, 0f);
+
+    scorer3.doc = 0;
+    scorer3.score = 1f;
+    leafCollector3.collect(0);
+    assertEquals(10f, minValueChecker.get().score, 0f);
+    assertEquals(10f, scorer3.minCompetitiveScore, 0f);
+
+    scorer.doc = 4;
+    scorer.score = 11;
+    leafCollector.collect(4);
+    assertEquals(11f, minValueChecker.get().score, 0f);
+    assertEquals(11f, scorer.minCompetitiveScore, 0f);
+    assertEquals(7f, scorer2.minCompetitiveScore, 0f);
+    assertEquals(10f, scorer3.minCompetitiveScore, 0f);
+
+    scorer3.doc = 1;
+    scorer3.score = 2f;
+    leafCollector3.collect(1);
+    assertEquals(11f, minValueChecker.get().score, 0f);
+    assertEquals(11f, scorer.minCompetitiveScore, 0f);
+    assertEquals(7f, scorer2.minCompetitiveScore, 0f);
+    assertEquals(11f, scorer3.minCompetitiveScore, 0f);
+
+
+    TopFieldDocs topDocs = manager.reduce(Arrays.asList(collector, collector2, collector3));
+    assertEquals(11, topDocs.totalHits.value);
+    assertEquals(new TotalHits(11, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), topDocs.totalHits);
+
+    reader.close();
+    dir.close();
+  }
+
+  public void testRandomMinCompetitiveScore() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig());
+    int numDocs = atLeast(1000);
+    for (int i = 0; i < numDocs; ++i) {
+      int numAs = 1 + random().nextInt(5);
+      int numBs = random().nextFloat() < 0.5f ?  0 : 1 + random().nextInt(5);
+      int numCs = random().nextFloat() < 0.1f ?  0 : 1 + random().nextInt(5);
+      Document doc = new Document();
+      for (int j = 0; j < numAs; ++j) {
+        doc.add(new StringField("f", "A", Field.Store.NO));
+      }
+      for (int j = 0; j < numBs; ++j) {
+        doc.add(new StringField("f", "B", Field.Store.NO));
+      }
+      for (int j = 0; j < numCs; ++j) {
+        doc.add(new StringField("f", "C", Field.Store.NO));
+      }
+      w.addDocument(doc);
+    }
+    IndexReader indexReader = w.getReader();
+    w.close();
+    Query[] queries = new Query[]{
+        new TermQuery(new Term("f", "A")),
+        new TermQuery(new Term("f", "B")),
+        new TermQuery(new Term("f", "C")),
+        new BooleanQuery.Builder()
+            .add(new TermQuery(new Term("f", "A")), BooleanClause.Occur.MUST)
+            .add(new TermQuery(new Term("f", "B")), BooleanClause.Occur.SHOULD)
+            .build()
+    };
+    for (Query query : queries) {
+      Sort sort = new Sort(new SortField[]{SortField.FIELD_SCORE, SortField.FIELD_DOC});
+      TopFieldCollector fieldCollector = doSearchWithThreshold(5, 0, query, sort, indexReader);
+      TopDocs tdc = doConcurrentSearchWithThreshold(5, 0, query, sort, indexReader);
+      TopDocs tdc2 = fieldCollector.topDocs();
+
+      assertTrue(tdc.totalHits.value > 0);
+      assertTrue(tdc2.totalHits.value > 0);
+      CheckHits.checkEqual(query, tdc.scoreDocs, tdc2.scoreDocs);
+    }
+
+    indexReader.close();
+    dir.close();
+  }
+
 }
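
The same pattern applies to the sort-based collector exercised above. A sketch, assuming an executor-backed IndexSearcher as in the previous example; note that, as in these tests, the sort leads with FIELD_SCORE so score-based pruning can apply:

    static TopFieldDocs searchSorted(IndexSearcher searcher, Query query) throws IOException {
      // The primary sort is by score, which enables minimum-score pruning.
      Sort sort = new Sort(SortField.FIELD_SCORE, SortField.FIELD_DOC);
      CollectorManager<TopFieldCollector, TopFieldDocs> manager =
          TopFieldCollector.createSharedManager(sort, 5 /* numHits */, null /* after */, 0 /* totalHitsThreshold */);
      return searcher.search(query, manager);
    }
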
diff --git a/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java b/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java
index fd80be6..9c0b2b9 100644
--- a/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java
+++ b/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java
@@ -45,6 +45,8 @@ import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.TestUtil;
 
+import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
+
 public class TestBKD extends LuceneTestCase {
 
   public void testBasicInts1D() throws Exception {
@@ -63,7 +65,7 @@ public class TestBKD extends LuceneTestCase {
 
       try (IndexInput in = dir.openInput("bkd", IOContext.DEFAULT)) {
         in.seek(indexFP);
-        BKDReader r = new BKDReader(in);
+        BKDReader r = new BKDReader(in, randomBoolean());
 
         // Simple 1D range query:
         final int queryMin = 42;
@@ -165,7 +167,7 @@ public class TestBKD extends LuceneTestCase {
 
       try (IndexInput in = dir.openInput("bkd", IOContext.DEFAULT)) {
         in.seek(indexFP);
-        BKDReader r = new BKDReader(in);
+        BKDReader r = new BKDReader(in, randomBoolean());
 
         byte[] minPackedValue = r.getMinPackedValue();
         byte[] maxPackedValue = r.getMaxPackedValue();
@@ -293,7 +295,7 @@ public class TestBKD extends LuceneTestCase {
 
       try (IndexInput in = dir.openInput("bkd", IOContext.DEFAULT)) {
         in.seek(indexFP);
-        BKDReader r = new BKDReader(in);
+        BKDReader r = new BKDReader(in, randomBoolean());
 
         int iters = atLeast(100);
         for(int iter=0;iter<iters;iter++) {
@@ -785,7 +787,7 @@ public class TestBKD extends LuceneTestCase {
         List<BKDReader> readers = new ArrayList<>();
         for(long fp : toMerge) {
           in.seek(fp);
-          readers.add(new BKDReader(in));
+          readers.add(new BKDReader(in, randomBoolean()));
         }
         out = dir.createOutput("bkd2", IOContext.DEFAULT);
         indexFP = w.merge(out, docMaps, readers);
@@ -799,7 +801,7 @@ public class TestBKD extends LuceneTestCase {
       }
 
       in.seek(indexFP);
-      BKDReader r = new BKDReader(in);
+      BKDReader r = new BKDReader(in, randomBoolean());
 
       int iters = atLeast(100);
       for(int iter=0;iter<iters;iter++) {
@@ -1073,7 +1075,7 @@ public class TestBKD extends LuceneTestCase {
 
       IndexInput in = dir.openInput("bkd", IOContext.DEFAULT);
       in.seek(fp);
-      BKDReader r = new BKDReader(in);
+      BKDReader r = new BKDReader(in, randomBoolean());
       r.intersect(new IntersectVisitor() {
           int lastDocID = -1;
 
@@ -1187,7 +1189,7 @@ public class TestBKD extends LuceneTestCase {
 
       IndexInput in = dir.openInput("bkd", IOContext.DEFAULT);
       in.seek(fp);
-      BKDReader r = new BKDReader(in);
+      BKDReader r = new BKDReader(in, randomBoolean());
       int[] count = new int[1];
       r.intersect(new IntersectVisitor() {
 
@@ -1242,7 +1244,7 @@ public class TestBKD extends LuceneTestCase {
 
     IndexInput in = dir.openInput("bkd", IOContext.DEFAULT);
     in.seek(fp);
-    BKDReader r = new BKDReader(in);
+    BKDReader r = new BKDReader(in, randomBoolean());
     int[] count = new int[1];
     r.intersect(new IntersectVisitor() {
 
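
Every BKDReader construction above gains a randomized boolean, which (by my reading of this patch) selects whether the packed points index is loaded off-heap; randomizing it makes each test exercise both code paths. A sketch under that assumption:

    static BKDReader openBKDReader(Directory dir, long indexFP, boolean offHeapIndex) throws IOException {
      IndexInput in = dir.openInput("bkd", IOContext.DEFAULT);
      in.seek(indexFP);
      // Second argument assumed to toggle off-heap loading of the packed index.
      return new BKDReader(in, offHeapIndex);
    }
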
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TopGroupsTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TopGroupsTest.java
new file mode 100644
index 0000000..8fb661d
--- /dev/null
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TopGroupsTest.java
@@ -0,0 +1,235 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.search.grouping;
+
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.TotalHits;
+import org.apache.lucene.util.LuceneTestCase;
+
+import org.junit.Ignore;
+
+public class TopGroupsTest extends LuceneTestCase {
+
+  @Ignore // https://issues.apache.org/jira/browse/LUCENE-8996
+  public void testAllGroupsEmptyInSecondPass() {
+    narrativeMergeTestImplementation(false, false, false, false);
+  }
+
+  @Ignore // https://issues.apache.org/jira/browse/LUCENE-8996
+  public void testSomeGroupsEmptyInSecondPass() {
+    narrativeMergeTestImplementation(false, false, false, true);
+    narrativeMergeTestImplementation(false, false, true, false);
+    narrativeMergeTestImplementation(false, false, true, true);
+
+    narrativeMergeTestImplementation(false, true, false, false);
+    narrativeMergeTestImplementation(false, true, false, true);
+    narrativeMergeTestImplementation(false, true, true, false);
+    narrativeMergeTestImplementation(false, true, true, true);
+
+    narrativeMergeTestImplementation(true, false, false, false);
+    narrativeMergeTestImplementation(true, false, false, true);
+    narrativeMergeTestImplementation(true, false, true, false);
+    narrativeMergeTestImplementation(true, false, true, true);
+
+    narrativeMergeTestImplementation(true, true, false, false);
+    narrativeMergeTestImplementation(true, true, false, true);
+    narrativeMergeTestImplementation(true, true, true, false);
+  }
+
+  public void testNoGroupsEmptyInSecondPass() {
+    narrativeMergeTestImplementation(true, true, true, true);
+  }
+
+  /*
+   * This method implements tests for the <code>TopGroups.merge</code> method
+   * using a narrative approach. Use of a creative narrative may seem unusual,
+   * but the idea behind it is to make it easier to reason about the documents,
+   * groups and scores in the test whilst covering several scenario
+   * permutations.
+   *
+   * Imagine:
+   *
+   * Each document represents (say) a picture book of an animal.
+   * We are searching for two books and wish to draw a picture of our own, inspired by the books.
+   * We think that large animals are easier to draw and therefore order the books by the featured animal's size.
+   * We think that different colors would make for a good drawing and therefore group the books by the featured animal's color.
+   *
+   * Index content:
+   *
+   * The documents are in 2 groups ("blue" and "red") and there are 4 documents across 2 shards:
+   * shard 1 (blue whale, red ant) and shard 2 (blue dragonfly, red squirrel).
+   *
+   * If all documents are present the "blue whale" and the "red squirrel" documents would be returned
+   * for our drawing since they are the largest animals in their respective groups.
+   *
+   * Test permutations (haveBlueWhale, haveRedAnt, haveBlueDragonfly, haveRedSquirrel) arise because
+   * in the first pass of the search all documents can be present, but
+   * in the second pass of the search some documents could be missing
+   * if they have been deleted 'just so' between the two phases.
+   *
+   * Additionally, a <code>haveAnimal == false</code> condition represents scenarios where a given
+   * group has documents on some but not all shards in the collection.
+   */
+  private void narrativeMergeTestImplementation(
+      boolean haveBlueWhale,
+      boolean haveRedAnt,
+      boolean haveBlueDragonfly,
+      boolean haveRedSquirrel) {
+
+    final String blueGroupValue = "blue";
+    final String redGroupValue = "red";
+
+    final Integer redAntSize = 1;
+    final Integer blueDragonflySize = 10;
+    final Integer redSquirrelSize = 100;
+    final Integer blueWhaleSize = 1000;
+
+    final float redAntScore = redAntSize;
+    final float blueDragonflyScore = blueDragonflySize;
+    final float redSquirrelScore = redSquirrelSize;
+    final float blueWhaleScore = blueWhaleSize;
+
+    final Sort sort = Sort.RELEVANCE;
+
+    final TopGroups<String> shard1TopGroups;
+    {
+      final GroupDocs<String> group1 = haveBlueWhale
+          ? createSingletonGroupDocs(blueGroupValue, new Object[] { blueWhaleSize }, 1 /* docId */, blueWhaleScore, 0 /* shardIndex */)
+              : createEmptyGroupDocs(blueGroupValue, new Object[] { blueWhaleSize });
+
+      final GroupDocs<String> group2 = haveRedAnt
+          ? createSingletonGroupDocs(redGroupValue, new Object[] { redAntSize }, 2 /* docId */, redAntScore, 0 /* shardIndex */)
+              : createEmptyGroupDocs(redGroupValue, new Object[] { redAntSize });
+
+      shard1TopGroups = new TopGroups<String>(
+          sort.getSort() /* groupSort */,
+          sort.getSort() /* withinGroupSort */,
+          group1.scoreDocs.length + group2.scoreDocs.length /* totalHitCount */,
+          group1.scoreDocs.length + group2.scoreDocs.length /* totalGroupedHitCount */,
+          combineGroupDocs(group1, group2) /* groups */,
+          (haveBlueWhale ? blueWhaleScore : (haveRedAnt ? redAntScore : Float.NaN)) /* maxScore */);
+    }
+
+    final TopGroups<String> shard2TopGroups;
+    {
+      final GroupDocs<String> group1 = haveBlueDragonfly
+          ? createSingletonGroupDocs(blueGroupValue, new Object[] { blueDragonflySize }, 3 /* docId */, blueDragonflyScore, 1 /* shardIndex */)
+              : createEmptyGroupDocs(blueGroupValue, new Object[] { blueDragonflySize });
+
+      final GroupDocs<String> group2 = haveRedSquirrel
+          ? createSingletonGroupDocs(redGroupValue, new Object[] { redSquirrelSize }, 4 /* docId */, redSquirrelScore, 1 /* shardIndex */)
+              : createEmptyGroupDocs(redGroupValue, new Object[] { redSquirrelSize });
+
+      shard2TopGroups = new TopGroups<String>(
+          sort.getSort() /* groupSort */,
+          sort.getSort() /* withinGroupSort */,
+          group1.scoreDocs.length + group2.scoreDocs.length /* totalHitCount */,
+          group1.scoreDocs.length + group2.scoreDocs.length /* totalGroupedHitCount */,
+          combineGroupDocs(group1, group2) /* groups */,
+          (haveRedSquirrel ? redSquirrelScore : (haveBlueDragonfly ? blueDragonflyScore : Float.NaN)) /* maxScore */);
+    }
+
+    final TopGroups<String> mergedTopGroups = TopGroups.<String>merge(
+        combineTopGroups(shard1TopGroups, shard2TopGroups),
+        sort /* groupSort */,
+        sort /* docSort */,
+        0 /* docOffset */,
+        2 /* docTopN */,
+        TopGroups.ScoreMergeMode.None);
+    assertNotNull(mergedTopGroups);
+
+    final int expectedCount =
+        (haveBlueWhale     ? 1 : 0) +
+        (haveRedAnt        ? 1 : 0) +
+        (haveBlueDragonfly ? 1 : 0) +
+        (haveRedSquirrel   ? 1 : 0);
+
+    assertEquals(expectedCount, mergedTopGroups.totalHitCount);
+    assertEquals(expectedCount, mergedTopGroups.totalGroupedHitCount);
+
+    assertEquals(2, mergedTopGroups.groups.length);
+    {
+      assertEquals(blueGroupValue, mergedTopGroups.groups[0].groupValue);
+      final float expectedBlueMaxScore =
+          (haveBlueWhale ? blueWhaleScore : (haveBlueDragonfly ? blueDragonflyScore : Float.MIN_VALUE));
+      checkMaxScore(expectedBlueMaxScore, mergedTopGroups.groups[0].maxScore);
+    }
+    {
+      assertEquals(redGroupValue, mergedTopGroups.groups[1].groupValue);
+      final float expectedRedMaxScore =
+          (haveRedSquirrel ? redSquirrelScore : (haveRedAnt ? redAntScore : Float.MIN_VALUE));
+      checkMaxScore(expectedRedMaxScore, mergedTopGroups.groups[1].maxScore);
+    }
+
+    final float expectedMaxScore =
+        (haveBlueWhale ? blueWhaleScore
+            : (haveRedSquirrel ? redSquirrelScore
+                : (haveBlueDragonfly ? blueDragonflyScore
+                    : (haveRedAnt ? redAntScore
+                        : Float.MIN_VALUE))));
+    checkMaxScore(expectedMaxScore, mergedTopGroups.maxScore);
+  }
+
+  private static void checkMaxScore(float expected, float actual) {
+    if (Float.isNaN(expected)) {
+      assertTrue(Float.isNaN(actual));
+    } else {
+      assertEquals(expected, actual, 0.0);
+    }
+  }
+
+  // helper methods
+
+  private static GroupDocs<String> createEmptyGroupDocs(String groupValue, Object[] groupSortValues) {
+    return new GroupDocs<String>(
+        Float.NaN /* score */,
+        Float.NaN /* maxScore */,
+        new TotalHits(0, TotalHits.Relation.EQUAL_TO),
+        new ScoreDoc[0],
+        groupValue,
+        groupSortValues);
+  }
+
+  private static GroupDocs<String> createSingletonGroupDocs(String groupValue, Object[] groupSortValues,
+      int docId, float docScore, int shardIndex) {
+    return new GroupDocs<String>(
+        Float.NaN /* score */,
+        docScore /* maxScore */,
+        new TotalHits(1, TotalHits.Relation.EQUAL_TO),
+        new ScoreDoc[] { new ScoreDoc(docId, docScore, shardIndex) },
+        groupValue,
+        groupSortValues);
+  }
+
+  private static GroupDocs<String>[] combineGroupDocs(GroupDocs<String> group0, GroupDocs<String> group1) {
+    @SuppressWarnings({"unchecked","rawtypes"})
+    final GroupDocs<String>[] groups = new GroupDocs[2];
+    groups[0] = group0;
+    groups[1] = group1;
+    return groups;
+  }
+
+  private static TopGroups<String>[] combineTopGroups(TopGroups<String> group0, TopGroups<String> group1) {
+    @SuppressWarnings({"unchecked","rawtypes"})
+    final TopGroups<String>[] groups = new TopGroups[2];
+    groups[0] = group0;
+    groups[1] = group1;
+    return groups;
+  }
+
+}
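
For reference, the merge call the narrative test exercises, extracted into a sketch (parameter roles follow the inline comments in the test):

    static TopGroups<String> mergeShards(TopGroups<String>[] shardGroups) {
      Sort sort = Sort.RELEVANCE;
      return TopGroups.merge(shardGroups,
          sort /* groupSort */,
          sort /* docSort */,
          0 /* docOffset */,
          2 /* docTopN */,
          TopGroups.ScoreMergeMode.None);
    }
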
diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties
index 729e3a5..870b14b 100644
--- a/lucene/ivy-versions.properties
+++ b/lucene/ivy-versions.properties
@@ -224,9 +224,10 @@ org.apache.zookeeper.version = 3.5.5
 /org.apache.zookeeper/zookeeper = ${org.apache.zookeeper.version}
 /org.apache.zookeeper/zookeeper-jute = ${org.apache.zookeeper.version}
 
-# v1.6.0-alpha.5 of asciidoctor-ant includes asciidoctorj-pdf 1.5.0-alpha.16,
-# which is the same as asciidoctor-pdf 1.5.0-alpha.16
-/org.asciidoctor/asciidoctor-ant = 1.6.0-alpha.5
+# v1.6.2 of asciidoctor-ant includes asciidoctorj 1.6.2, which uses
+# asciidoctor 1.5.8, and asciidoctorj-pdf 1.5.0-alpha.16, which is the same
+# as asciidoctor-pdf 1.5.0-alpha.16
+/org.asciidoctor/asciidoctor-ant = 1.6.2
 
 /org.aspectj/aspectjrt = 1.8.0
 
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java
index cd1f1f0..183bca1 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java
@@ -190,6 +190,13 @@ final class GlobalOrdinalsWithScoreQuery extends Query implements Accountable {
       }
     }
 
+    @Override
+    public boolean isCacheable(LeafReaderContext ctx) {
+      // disable caching because this query relies on a top reader context
+      // and holds a bitset of matching ordinals that cannot be accounted in
+      // the memory used by the cache
+      return false;
+    }
   }
 
   final static class OrdinalMapScorer extends BaseGlobalOrdinalScorer {
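
The override above follows the general contract for weights whose matches depend on top-level reader state: returning false keeps the query cache from storing per-segment entries it could neither validate nor account for. As a sketch, any similar custom Weight would do the same:

    @Override
    public boolean isCacheable(LeafReaderContext ctx) {
      // Matches are computed against the top reader context and backed by a
      // bitset of global ordinals, so a per-segment cache entry would be
      // unsafe and its memory unaccounted: opt out of caching entirely.
      return false;
    }
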
diff --git a/lucene/sandbox/src/java/org/apache/lucene/geo/Line2D.java b/lucene/sandbox/src/java/org/apache/lucene/geo/Line2D.java
index 15c923e..02d1422 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/geo/Line2D.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/geo/Line2D.java
@@ -107,15 +107,21 @@ public final class Line2D implements Component2D {
       if (tree.isPointOnLine(ax, ay)) {
         return Relation.CELL_INSIDE_QUERY;
       }
-    } else if ((ax == cx && ay == cy) || (bx == cx && by == cy)) {
+    } else if (ax == cx && ay == cy) {
       // indexed "triangle" is a line:
       if (tree.crossesLine(minX, maxX, minY, maxY, ax, ay, bx, by)) {
         return Relation.CELL_CROSSES_QUERY;
       }
       return Relation.CELL_OUTSIDE_QUERY;
-    } else if ((ax == bx && ay == by)) {
+    } else if (ax == bx && ay == by) {
       // indexed "triangle" is a line:
-      if (tree.crossesLine(minX, maxX, minY, maxY, ax, ay, cx, cy)) {
+      if (tree.crossesLine(minX, maxX, minY, maxY, bx, by, cx, cy)) {
+        return Relation.CELL_CROSSES_QUERY;
+      }
+      return Relation.CELL_OUTSIDE_QUERY;
+    } else if (bx == cx && by == cy) {
+      // indexed "triangle" is a line:
+      if (tree.crossesLine(minX, maxX, minY, maxY, cx, cy, ax, ay)) {
         return Relation.CELL_CROSSES_QUERY;
       }
       return Relation.CELL_OUTSIDE_QUERY;
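
The fix matters because a "triangle" with two coincident vertices is really the segment between the two distinct vertices, and the previous code could test a zero-length edge. A sketch of the case analysis, with a hypothetical helper name:

    // Hypothetical helper: return the segment between the two distinct vertices
    // of a degenerate triangle, or null for a proper triangle.
    static double[] degenerateEdge(double ax, double ay, double bx, double by, double cx, double cy) {
      if (ax == cx && ay == cy) return new double[] {ax, ay, bx, by}; // a == c: test edge a-b
      if (ax == bx && ay == by) return new double[] {bx, by, cx, cy}; // a == b: test edge b-c
      if (bx == cx && by == cy) return new double[] {cx, cy, ax, ay}; // b == c: test edge c-a
      return null;
    }
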
diff --git a/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
index c936a35..6939e1f 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
@@ -373,26 +373,26 @@ public abstract class BaseTokenStreamTestCase extends LuceneTestCase {
   }
   
   public static void assertAnalyzesTo(Analyzer a, String input, String[] output, int startOffsets[], int endOffsets[], String types[], int posIncrements[]) throws IOException {
+    assertTokenStreamContents(a.tokenStream("dummy", input), output, startOffsets, endOffsets, types, posIncrements, null, input.length());
     checkResetException(a, input);
     checkAnalysisConsistency(random(), a, true, input);
-    assertTokenStreamContents(a.tokenStream("dummy", input), output, startOffsets, endOffsets, types, posIncrements, null, input.length());
   }
   
   public static void assertAnalyzesTo(Analyzer a, String input, String[] output, int startOffsets[], int endOffsets[], String types[], int posIncrements[], int posLengths[]) throws IOException {
+    assertTokenStreamContents(a.tokenStream("dummy", input), output, startOffsets, endOffsets, types, posIncrements, posLengths, input.length());
     checkResetException(a, input);
     checkAnalysisConsistency(random(), a, true, input);
-    assertTokenStreamContents(a.tokenStream("dummy", input), output, startOffsets, endOffsets, types, posIncrements, posLengths, input.length());
   }
 
   public static void assertAnalyzesTo(Analyzer a, String input, String[] output, int startOffsets[], int endOffsets[], String types[], int posIncrements[], int posLengths[], boolean graphOffsetsAreCorrect) throws IOException {
+    assertTokenStreamContents(a.tokenStream("dummy", input), output, startOffsets, endOffsets, types, posIncrements, posLengths, input.length(), graphOffsetsAreCorrect);
     checkResetException(a, input);
     checkAnalysisConsistency(random(), a, true, input, graphOffsetsAreCorrect);
-    assertTokenStreamContents(a.tokenStream("dummy", input), output, startOffsets, endOffsets, types, posIncrements, posLengths, input.length(), graphOffsetsAreCorrect);
   }
 
   public static void assertAnalyzesTo(Analyzer a, String input, String[] output, int startOffsets[], int endOffsets[], String types[], int posIncrements[], int posLengths[], boolean graphOffsetsAreCorrect, byte[][] payloads) throws IOException {
-    checkResetException(a, input);
     assertTokenStreamContents(a.tokenStream("dummy", input), output, startOffsets, endOffsets, types, posIncrements, posLengths, input.length(), null, null, graphOffsetsAreCorrect, payloads);
+    checkResetException(a, input);
   }
 
   public static void assertAnalyzesTo(Analyzer a, String input, String[] output) throws IOException {
@@ -948,13 +948,8 @@ public abstract class BaseTokenStreamTestCase extends LuceneTestCase {
     w.close();
   }
   
-  static int[] toIntArray(List<Integer> list) {
-    int ret[] = new int[list.size()];
-    int offset = 0;
-    for (Integer i : list) {
-      ret[offset++] = i;
-    }
-    return ret;
+  private static int[] toIntArray(List<Integer> list) {
+    return list.stream().mapToInt(Integer::intValue).toArray();
   }
 
   protected static MockTokenizer whitespaceMockTokenizer(Reader input) throws IOException {
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index f1571b1..e68a482 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -66,6 +66,8 @@ Upgrade Notes
 
 * SOLR-13593 SOLR-13690 SOLR-13691: Allow to look up analyzer components by their SPI names in field type configuration. (Tomoko Uchida)
 
+* SOLR-13854: SolrMetricProducer API has changed and user components that implement this API need to be updated. (ab)
+
 Improvements
 ----------------------
 
@@ -98,12 +100,18 @@ Upgrade Notes
 
 New Features
 ---------------------
-(No changes)
+* SOLR-13821: A Package store to store and load package artifacts (noble, Ishan Chattopadhyaya)
+
+* SOLR-13822: A Package management system with the following features: a packages.json in ZK to store
+  the configuration, APIs to read/edit it, and isolated classloaders to load the classes from
+  those packages if the 'class' attribute is prefixed with `<package-name>:` (noble, Ishan Chattopadhyaya)
 
 Improvements
 ---------------------
 
-*SOLR-13731: javabin must support a 1:1 mapping of the JSON update format (noble)
+* SOLR-13731: 'javabin' must support a 1:1 mapping of the JSON update format (noble)
+
+* SOLR-13831: Support defining arbitrary autoscaling simulation scenarios. (ab)
 
 
 Optimizations
@@ -112,11 +120,24 @@ Optimizations
 
 Bug Fixes
 ---------------------
-(No changes)
+
+* SOLR-13827: Fail on unknown operation in Request Parameters API (Munendra S N, noble)
+
+* SOLR-13403: Fix NPE in TermsComponent for DatePointField (yonik, Munendra S N)
+
+* SOLR-9802: Fix grouping failure for date field in solrcloud (Erick Erickson, Munendra S N, Vitaly Lavrov)
+
+* SOLR-12393: Compute score if requested even when expanded docs not sorted by score in ExpandComponent.
+  (David Smiley, Munendra S N)
 
 Other Changes
 ---------------------
-(No changes)
+
+* SOLR-12769: Fix incorrect documentation for 'delete' op in Request parameters API (Alexandre Rafalovitch, Munendra S N)
+
+* SOLR-13841: Add jackson databind annotations to SolrJ classpath (noble)
+
+* SOLR-13824: Strictly reject anything after JSON in most APIs (Mikhail Khludnev, Munendra S N)
 
 ==================  8.3.0 ==================
 
@@ -181,15 +202,13 @@ New Features
 
 * SOLR-8241: Add CaffeineCache, an efficient implementation of SolrCache.(Ben Manes, Shawn Heisey, David Smiley, Andrzej Bialecki)
 
-* SOLR-13821: A Package store to store and load package artefacts (noble, Ishan Chattopadhyaya)
-
 * SOLR-13298: Allow zplot to plot matrices (Joel Bernstein)
 
 Improvements
 ----------------------
 
 * SOLR-12368: Support InPlace DV updates for a field that does not yet exist in any documents
-(hossman, Simon Willnauer, Adrien Grand, Munendra S N)
+  (hossman, Simon Willnauer, Adrien Grand, Munendra S N)
 
 * SOLR-13558, SOLR-13693: Allow dynamic resizing of SolrCache-s. (ab)
 
@@ -329,6 +348,16 @@ Bug Fixes
 * SOLR-13834: ZkController#getSolrCloudManager() created a new instance of ZkStateReader, thereby causing mismatch in the
   visibility of the cluster state and, as a result, undesired race conditions (Clay Goddard via Ishan Chattopadhyaya)
 
+* SOLR-13835: HttpSolrCall produces incorrect extra AuditEvent on AuthorizationResponse.PROMPT (janhoy, hossman)
+
+* SOLR-13843: The MOVEREPLICA API ignores replica type and always adds 'nrt' replicas (Amrit Sarkar via shalin)
+
+* SOLR-13677: All Metrics Gauges should be unregistered by components that registered them. (noble, ab)
+
+* SOLR-13855: DistributedZkUpdateProcessor should have been propagating URP.finish() lifecycle like it used to before
+  8.1 (a regression).  Impacts integrity since Run URP's finish() propagates this to the updateLog to fsync.
+  (David Smiley)
+
 Other Changes
 ----------------------
 
@@ -364,6 +393,8 @@ Other Changes
 
 * SOLR-13787: An annotation based system to write v2 APIs (noble)
 
+* SOLR-12786: Update Ref Guide build tool versions (Cassandra)
+
 ==================  8.2.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
index 50938e4..8b64b6f 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
@@ -36,7 +36,7 @@ import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.RawResponseWriter;
 import org.apache.solr.response.SolrQueryResponse;
@@ -275,8 +275,8 @@ public class DataImportHandler extends RequestHandlerBase implements
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    super.initializeMetrics(manager, registryName, tag, scope);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    super.initializeMetrics(parentContext, scope);
     metrics = new MetricsMap((detailed, map) -> {
       if (importer != null) {
         DocBuilder.Statistics cumulative = importer.cumulativeStatistics;
@@ -299,7 +299,7 @@ public class DataImportHandler extends RequestHandlerBase implements
         map.put(DataImporter.MSG.TOTAL_DOCS_SKIPPED, cumulative.skipDocCount);
       }
     });
-    manager.registerGauge(this, registryName, metrics, tag, true, "importer", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, metrics, true, "importer", getCategory().toString(), scope);
   }
 
   // //////////////////////SolrInfoMBeans methods //////////////////////
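
This hunk is a representative example of the SOLR-13854 migration called out in CHANGES.txt above: components stop receiving the raw SolrMetricManager and instead work through a SolrMetricsContext. A before/after sketch using only the calls visible in this patch:

    // Before (old SolrMetricProducer API):
    // public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
    //   super.initializeMetrics(manager, registryName, tag, scope);
    //   manager.registerGauge(this, registryName, metrics, tag, true, "importer", getCategory().toString(), scope);
    // }

    // After: the parent context carries the manager, registry and tag.
    @Override
    public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
      super.initializeMetrics(parentContext, scope);
      solrMetricsContext.gauge(this, metrics, true, "importer", getCategory().toString(), scope);
    }
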
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/DefaultWrapperModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/DefaultWrapperModel.java
index bdb62d9..a56fe82 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/DefaultWrapperModel.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/DefaultWrapperModel.java
@@ -94,7 +94,7 @@ public class DefaultWrapperModel extends WrapperModel {
   @SuppressWarnings("unchecked")
   protected Map<String, Object> parseInputStream(InputStream in) throws IOException {
     try (Reader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
-      return (Map<String, Object>) new ObjectBuilder(new JSONParser(reader)).getVal();
+      return (Map<String, Object>) new ObjectBuilder(new JSONParser(reader)).getValStrict();
     }
   }
 
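
The switch to getValStrict() ties in with SOLR-13824 in CHANGES.txt: anything after the parsed JSON value is rejected rather than silently ignored. A sketch, assuming the org.noggit parser used here:

    static Object parseStrict(String json) throws IOException {
      try (Reader reader = new StringReader(json)) {
        // getVal() stops after the first value and ignores trailing content;
        // getValStrict() fails instead, e.g. for "{\"a\": 1} trailing garbage".
        return new ObjectBuilder(new JSONParser(reader)).getValStrict();
      }
    }
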
diff --git a/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java
index eca2283..a6ae1be 100644
--- a/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java
+++ b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java
@@ -227,6 +227,7 @@ public class AnnotatedApi extends Api implements PermissionNameProvider {
           }
           if (isWrappedInPayloadObj) {
             PayloadObj<Object> payloadObj = new PayloadObj<>(cmd.name, cmd.getCommandData(), o);
+            cmd = payloadObj;
             method.invoke(obj, req, rsp, payloadObj);
           } else {
             method.invoke(obj, req, rsp, o);
diff --git a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
index 407d548..1558389 100644
--- a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
+++ b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
@@ -244,12 +244,12 @@ public class CloudUtil {
                                                       boolean requireLeaders) {
     return (liveNodes, collectionState) -> {
       if (collectionState == null) {
-        log.info("-- null collection");
+        log.debug("-- null collection");
         return false;
       }
       Collection<Slice> slices = withInactive ? collectionState.getSlices() : collectionState.getActiveSlices();
       if (slices.size() != expectedShards) {
-        log.info("-- wrong number of slices for collection {}, expected={}, found={}: {}", collectionState.getName(), expectedShards, collectionState.getSlices().size(), collectionState.getSlices());
+        log.debug("-- wrong number of slices for collection {}, expected={}, found={}: {}", collectionState.getName(), expectedShards, collectionState.getSlices().size(), collectionState.getSlices());
         return false;
       }
       Set<String> leaderless = new HashSet<>();
@@ -268,7 +268,7 @@ public class CloudUtil {
             activeReplicas++;
         }
         if (activeReplicas != expectedReplicas) {
-          log.info("-- wrong number of active replicas for collection {} in slice {}, expected={}, found={}", collectionState.getName(), slice.getName(), expectedReplicas, activeReplicas);
+          log.debug("-- wrong number of active replicas for collection {} in slice {}, expected={}, found={}", collectionState.getName(), slice.getName(), expectedReplicas, activeReplicas);
           return false;
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
index 957b321..17a6ec3 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
@@ -133,7 +133,7 @@ public class ReplicateFromLeader {
 
   public void stopReplication() {
     if (replicationProcess != null) {
-      replicationProcess.close();
+      replicationProcess.shutdown();
     }
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java
index 4e462f6..9d5a049 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java
@@ -218,7 +218,9 @@ public class MoveReplicaCmd implements OverseerCollectionMessageHandler.Cmd {
         WAIT_FOR_FINAL_STATE, String.valueOf(waitForFinalState),
         SKIP_CREATE_REPLICA_IN_CLUSTER_STATE, skipCreateReplicaInClusterState,
         CoreAdminParams.ULOG_DIR, ulogDir.substring(0, ulogDir.lastIndexOf(UpdateLog.TLOG_NAME)),
-        CoreAdminParams.DATA_DIR, dataDir);
+        CoreAdminParams.DATA_DIR, dataDir,
+        ZkStateReader.REPLICA_TYPE, replica.getType().name());
+
     if(async!=null) addReplicasProps.getProperties().put(ASYNC, async);
     NamedList addResult = new NamedList();
     try {
@@ -272,7 +274,9 @@ public class MoveReplicaCmd implements OverseerCollectionMessageHandler.Cmd {
         COLLECTION_PROP, coll.getName(),
         SHARD_ID_PROP, slice.getName(),
         CoreAdminParams.NODE, targetNode,
-        CoreAdminParams.NAME, newCoreName);
+        CoreAdminParams.NAME, newCoreName,
+        ZkStateReader.REPLICA_TYPE, replica.getType().name());
+
     if (async != null) addReplicasProps.getProperties().put(ASYNC, async);
     NamedList addResult = new NamedList();
     SolrCloseableLatch countDownLatch = new SolrCloseableLatch(1, ocmh);
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java
index b6755b1..1341a24 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java
@@ -595,7 +595,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
   private static String fullName = SystemLogListener.class.getName();
   private static String solrName = "solr." + SystemLogListener.class.getSimpleName();
 
-  static AutoScalingConfig withSystemLogListener(AutoScalingConfig autoScalingConfig, String triggerName) {
+  public static AutoScalingConfig withSystemLogListener(AutoScalingConfig autoScalingConfig, String triggerName) {
     Map<String, AutoScalingConfig.TriggerListenerConfig> configs = autoScalingConfig.getTriggerListenerConfigs();
     for (AutoScalingConfig.TriggerListenerConfig cfg : configs.values()) {
       if (triggerName.equals(cfg.trigger)) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java
index 9a83db2..2c22e3e 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java
@@ -134,6 +134,8 @@ public class ScheduledTriggers implements Closeable {
 
   private final TriggerListeners listeners;
 
+  private final List<TriggerListener> additionalListeners = new ArrayList<>();
+
   private AutoScalingConfig autoScalingConfig;
 
   public ScheduledTriggers(SolrResourceLoader loader, SolrCloudManager cloudManager) {
@@ -552,6 +554,22 @@ public class ScheduledTriggers implements Closeable {
     log.debug("ScheduledTriggers closed completely");
   }
 
+  /**
+   * Add a temporary listener for internal use (tests, simulation).
+   * @param listener listener instance
+   */
+  public void addAdditionalListener(TriggerListener listener) {
+    listeners.addAdditionalListener(listener);
+  }
+
+  /**
+   * Remove a temporary listener for internal use (tests, simulation).
+   * @param listener listener instance
+   */
+  public void removeAdditionalListener(TriggerListener listener) {
+    listeners.removeAdditionalListener(listener);
+  }
+
   private class TriggerWrapper implements Runnable, Closeable {
     AutoScaling.Trigger trigger;
     ScheduledFuture<?> scheduledFuture;
@@ -657,6 +675,7 @@ public class ScheduledTriggers implements Closeable {
   private class TriggerListeners {
     Map<String, Map<TriggerEventProcessorStage, List<TriggerListener>>> listenersPerStage = new HashMap<>();
     Map<String, TriggerListener> listenersPerName = new HashMap<>();
+    List<TriggerListener> additionalListeners = new ArrayList<>();
     ReentrantLock updateLock = new ReentrantLock();
 
     public TriggerListeners() {
@@ -680,6 +699,41 @@ public class ScheduledTriggers implements Closeable {
       return new TriggerListeners(listenersPerStage, listenersPerName);
     }
 
+    public void addAdditionalListener(TriggerListener listener) {
+      updateLock.lock();
+      try {
+        AutoScalingConfig.TriggerListenerConfig config = listener.getConfig();
+        for (TriggerEventProcessorStage stage : config.stages) {
+          addPerStage(config.trigger, stage, listener);
+        }
+        // add also for beforeAction / afterAction TriggerStage
+        if (!config.beforeActions.isEmpty()) {
+          addPerStage(config.trigger, TriggerEventProcessorStage.BEFORE_ACTION, listener);
+        }
+        if (!config.afterActions.isEmpty()) {
+          addPerStage(config.trigger, TriggerEventProcessorStage.AFTER_ACTION, listener);
+        }
+        additionalListeners.add(listener);
+      } finally {
+        updateLock.unlock();
+      }
+    }
+
+    public void removeAdditionalListener(TriggerListener listener) {
+      updateLock.lock();
+      try {
+        listenersPerName.remove(listener.getConfig().name);
+        listenersPerStage.forEach((trigger, perStage) -> {
+          perStage.forEach((stage, listeners) -> {
+            listeners.remove(listener);
+          });
+        });
+        additionalListeners.remove(listener);
+      } finally {
+        updateLock.unlock();
+      }
+    }
+
     void setAutoScalingConfig(AutoScalingConfig autoScalingConfig) {
       updateLock.lock();
       // we will recreate this from scratch
@@ -756,6 +810,13 @@ public class ScheduledTriggers implements Closeable {
             addPerStage(config.trigger, TriggerEventProcessorStage.AFTER_ACTION, listener);
           }
         }
+        // re-add additional listeners
+        List<TriggerListener> additional = new ArrayList<>(additionalListeners);
+        additionalListeners.clear();
+        for (TriggerListener listener : additional) {
+          addAdditionalListener(listener);
+        }
+
       } finally {
         updateLock.unlock();
       }
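
A sketch of how the new hooks might be used from a test or simulation. The configure signature, config keys, and the loader/cloudManager/scheduledTriggers references are my assumptions based on the existing listener plumbing, not spelled out in this patch:

    Map<String, Object> props = new HashMap<>();
    props.put("trigger", "node_lost_trigger");                           // assumed config key
    props.put("stage", Arrays.asList("STARTED", "SUCCEEDED", "FAILED")); // assumed config key
    TriggerListener capturing = new TriggerListenerBase() {
      @Override
      public void onEvent(TriggerEvent event, TriggerEventProcessorStage stage, String actionName,
                          ActionContext context, Throwable error, String message) {
        // record the event for later assertions
      }
    };
    capturing.configure(loader, cloudManager, new AutoScalingConfig.TriggerListenerConfig("capturing", props));
    scheduledTriggers.addAdditionalListener(capturing);
    // ... drive the scenario under test ...
    scheduledTriggers.removeAdditionalListener(capturing);
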
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/FakeDocIterator.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/FakeDocIterator.java
similarity index 100%
rename from solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/FakeDocIterator.java
rename to solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/FakeDocIterator.java
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
index a029ac3..3bcc273 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
@@ -98,6 +98,7 @@ import org.apache.solr.metrics.AltBufferPoolMetricSet;
 import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.metrics.OperatingSystemMetricSet;
 import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.util.DefaultSolrThreadFactory;
@@ -482,7 +483,8 @@ public class SimCloudManager implements SolrCloudManager {
     if (metricsHistoryHandler == null && liveNodesSet.size() == 1) {
       metricsHandler = new MetricsHandler(metricManager);
       metricsHistoryHandler = new MetricsHistoryHandler(nodeId, metricsHandler, solrClient, this, new HashMap<>());
-      metricsHistoryHandler.initializeMetrics(metricManager, SolrMetricManager.getRegistryName(SolrInfoBean.Group.node), metricTag, CommonParams.METRICS_HISTORY_PATH);
+      SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, SolrMetricManager.getRegistryName(SolrInfoBean.Group.node), metricTag);
+      metricsHistoryHandler.initializeMetrics(solrMetricsContext, CommonParams.METRICS_HISTORY_PATH);
     }
     return nodeId;
   }
@@ -842,11 +844,14 @@ public class SimCloudManager implements SolrCloudManager {
     String a = params != null ? params.get(CoreAdminParams.ACTION) : null;
     SolrResponse rsp = new SolrResponseBase();
     rsp.setResponse(new NamedList<>());
+    String path = params != null ? params.get("path") : null;
     if (!(req instanceof CollectionAdminRequest)) {
       // maybe a V2Request?
       if (req instanceof V2Request) {
         params = SimUtils.v2AdminRequestToV1Params((V2Request)req);
         a = params.get(CoreAdminParams.ACTION);
+      } else if (path != null && (path.startsWith("/admin/") || path.startsWith("/cluster/"))) {
+        // pass it through, it's likely a generic request containing admin params
       } else {
         throw new UnsupportedOperationException("Only some CollectionAdminRequest-s are supported: " + req.getClass().getName() + ": " + req.getPath() + " " + req.getParams());
       }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
index 2e5be68..861ef07 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
@@ -700,6 +700,19 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     cloudManager.getTimeSource().sleep(delays.get(op));
   }
 
+  public void simSetOpDelays(String collection, Map<String, Long> delays) {
+    Map<String, Long> currentDelays = opDelays.getOrDefault(collection, Collections.emptyMap());
+    Map<String, Long> newDelays = new HashMap<>(currentDelays);
+    delays.forEach((k, v) -> {
+      if (v == null) {
+        newDelays.remove(k);
+      } else {
+        newDelays.put(k, v);
+      }
+    });
+    opDelays.put(collection, newDelays);
+  }
+
   /**
    * Simulate running a shard leader election. This operation is a no-op if a leader already exists.
    * If a new leader is elected the cluster state is saved.
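
The new simSetOpDelays merges per-collection delays, with a null value removing an existing entry. A usage sketch (the op name and the millisecond unit are assumptions):

    Map<String, Long> delays = new HashMap<>();
    delays.put("MOVEREPLICA", 5000L);   // hypothetical op name; delay assumed to be in ms
    clusterStateProvider.simSetOpDelays("test_collection", delays);

    delays.clear();
    delays.put("MOVEREPLICA", null);    // a null value removes the delay again
    clusterStateProvider.simSetOpDelays("test_collection", delays);
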
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java
new file mode 100644
index 0000000..7914df7
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java
@@ -0,0 +1,1091 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud.autoscaling.sim;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+import java.io.Reader;
+import java.lang.invoke.MethodHandles;
+import java.net.URLDecoder;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.SolrResponse;
+import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
+import org.apache.solr.client.solrj.cloud.autoscaling.Clause;
+import org.apache.solr.client.solrj.cloud.autoscaling.Policy;
+import org.apache.solr.client.solrj.cloud.autoscaling.PolicyHelper;
+import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
+import org.apache.solr.client.solrj.cloud.autoscaling.Suggester;
+import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventProcessorStage;
+import org.apache.solr.client.solrj.cloud.autoscaling.Variable;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.SolrClientCloudManager;
+import org.apache.solr.client.solrj.request.GenericSolrRequest;
+import org.apache.solr.client.solrj.request.RequestWriter;
+import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.client.solrj.request.V2Request;
+import org.apache.solr.cloud.CloudUtil;
+import org.apache.solr.cloud.autoscaling.ActionContext;
+import org.apache.solr.cloud.autoscaling.AutoScaling;
+import org.apache.solr.cloud.autoscaling.AutoScalingHandler;
+import org.apache.solr.cloud.autoscaling.TriggerEvent;
+import org.apache.solr.cloud.autoscaling.TriggerListener;
+import org.apache.solr.cloud.autoscaling.TriggerListenerBase;
+import org.apache.solr.common.params.AutoScalingParams;
+import org.apache.solr.common.params.CollectionAdminParams;
+import org.apache.solr.common.params.CollectionParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.IOUtils;
+import org.apache.solr.common.util.TimeSource;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.util.CLIO;
+import org.apache.solr.util.PropertiesUtil;
+import org.apache.solr.util.RedactionUtils;
+import org.apache.solr.util.TimeOut;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class represents an autoscaling scenario consisting of a series of autoscaling
+ * operations on a simulated cluster.
+ */
+public class SimScenario implements AutoCloseable {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  /** Context variable: Random live node name. */
+  public static final String RANDOM_NODE_CTX_PROP = "_random_node_";
+  /** Context variable: Node name of the current Overseer leader. */
+  public static final String OVERSEER_LEADER_CTX_PROP = "_overseer_leader_";
+  /** Context variable: List of live nodes. */
+  public static final String LIVE_NODES_CTX_PROP = "_live_nodes_";
+  /** Context variable: List of collections. */
+  public static final String COLLECTIONS_CTX_PROP = "_collections_";
+  /** Context variable: List of calculated suggestions. */
+  public static final String SUGGESTIONS_CTX_PROP = "_suggestions_";
+  /** Context variable: List of SolrResponses of SOLR_REQUEST operations. */
+  public static final String RESPONSES_CTX_PROP = "_responses_";
+  /** Context variable: Current loop iteration or none if outside of loop. */
+  public static final String LOOP_ITER_PROP = "_loop_iter_";
+  /** Context variable prefix: last trigger event captured by WAIT_EVENT. */
+  public static final String TRIGGER_EVENT_PREFIX = "_trigger_event_";
+
+  public SimCloudManager cluster;
+  public AutoScalingConfig config;
+  public List<SimOp> ops = new ArrayList<>();
+  public Map<String, Object> context = new HashMap<>();
+  public PrintStream console = CLIO.getErrStream();
+  public boolean verbose;
+  public boolean abortLoop;
+  public boolean abortScenario;
+
+  /** Base class for implementation of scenario DSL actions. */
+  public static abstract class SimOp {
+    ModifiableSolrParams initParams;
+    ModifiableSolrParams params;
+
+    public void init(SolrParams params) {
+      this.initParams = new ModifiableSolrParams(params);
+    }
+
+    /**
+     * This method prepares a copy of the initial params and sets the value of {@link #params},
+     * with all property references resolved against the current {@link SimScenario#context}
+     * and system properties. This method should always be called before invoking
+     * {@link #execute(SimScenario)}.
+     * @param scenario current scenario
+     */
+    public void prepareCurrentParams(SimScenario scenario) {
+      Properties props = new Properties();
+      scenario.context.forEach((k, v) -> {
+        if (v instanceof String[]) {
+          v = String.join(",", (String[]) v);
+        } else if (v instanceof Collection) {
+          StringBuilder sb = new StringBuilder();
+          for (Object o : (Collection<Object>)v) {
+            if (sb.length() > 0) {
+              sb.append(',');
+            }
+            if ((o instanceof String) || (o instanceof Number)) {
+              sb.append(o);
+            } else {
+              // skip the whole entry if any element is not a String or Number
+              return;
+            }
+          }
+          v = sb.toString();
+        } else if ((v instanceof String) || (v instanceof Number)) {
+          // don't convert, put as is
+        } else {
+          // skip
+          return;
+        }
+        props.put(k, v);
+      });
+      ModifiableSolrParams currentParams = new ModifiableSolrParams();
+      initParams.forEach(e -> {
+        String newKey = PropertiesUtil.substituteProperty(e.getKey(), props);
+        if (newKey == null) {
+          newKey = e.getKey();
+        }
+        String[] newValues;
+        if (e.getValue() != null && e.getValue().length > 0) {
+          String[] values = e.getValue();
+          newValues = new String[values.length];
+          for (int k = 0; k < values.length; k++) {
+            String newVal = PropertiesUtil.substituteProperty(values[k], props);
+            if (newVal == null) {
+              newVal = values[k];
+            }
+            newValues[k] = newVal;
+          }
+        } else {
+          newValues = e.getValue();
+        }
+        currentParams.add(newKey, newValues);
+      });
+      params = currentParams;
+    }
+
+    /**
+     * Execute the operation.
+     * @param scenario current scenario.
+     */
+    public abstract void execute(SimScenario scenario) throws Exception;
+  }
+
+
+  /**
+   * Actions supported by the scenario.
+   */
+  public enum SimAction {
+    /** Create a new simulated cluster. */
+    CREATE_CLUSTER,
+    /** Create a simulated cluster from autoscaling snapshot. */
+    LOAD_SNAPSHOT,
+    /** Save autoscaling snapshot of the current simulated cluster. */
+    SAVE_SNAPSHOT,
+    /** Calculate autoscaling suggestions and put them in the scenario's context. */
+    CALCULATE_SUGGESTIONS,
+    /** Apply previously calculated autoscaling suggestions. */
+    APPLY_SUGGESTIONS,
+    /** Kill specific nodes, or a number of randomly selected nodes. */
+    KILL_NODES,
+    /** Add new nodes. */
+    ADD_NODES,
+    /** Load autoscaling.json configuration from a file. */
+    LOAD_AUTOSCALING,
+    /** Start a loop. */
+    LOOP_START,
+    /** End a loop. */
+    LOOP_END,
+    /** Set operation delays to simulate long-running actions. */
+    SET_OP_DELAYS,
+    /** Execute a SolrRequest (must be supported by {@link SimCloudManager}). */
+    SOLR_REQUEST,
+    /** Wait for a collection to reach the indicated number of shards and replicas. */
+    WAIT_COLLECTION,
+    /** Prepare a listener to listen for an autoscaling event. */
+    EVENT_LISTENER,
+    /** Wait for an autoscaling event using previously prepared listener. */
+    WAIT_EVENT,
+    /** Run the simulation for a while, allowing background tasks to execute. */
+    RUN,
+    /** Dump the internal state of the simulator to console. */
+    DUMP,
+    /** Set a variable in context. */
+    CTX_SET,
+    /** Remove a variable from context. */
+    CTX_REMOVE,
+    /** Set metrics for a node. */
+    SET_NODE_METRICS,
+    /** Set metrics for each replica of a collection's shard(s). */
+    SET_SHARD_METRICS,
+    /** Bulk index a number of simulated documents. */
+    INDEX_DOCS,
+    /** Assert a condition. */
+    ASSERT;
+
+    public static SimAction get(String str) {
+      if (str != null) {
+        try {
+          return SimAction.valueOf(str.toUpperCase(Locale.ROOT));
+        } catch (Exception e) {
+          return null;
+        }
+      } else {
+        return null;
+      }
+    }
+
+    public String toLower() {
+      return toString().toLowerCase(Locale.ROOT);
+    }
+  }
+
+  public static Map<SimAction, Class<? extends SimOp>> simOps = new HashMap<>();
+  static {
+    simOps.put(SimAction.CREATE_CLUSTER, CreateCluster.class);
+    simOps.put(SimAction.LOAD_SNAPSHOT, LoadSnapshot.class);
+    simOps.put(SimAction.SAVE_SNAPSHOT, SaveSnapshot.class);
+    simOps.put(SimAction.LOAD_AUTOSCALING, LoadAutoscaling.class);
+    simOps.put(SimAction.CALCULATE_SUGGESTIONS, CalculateSuggestions.class);
+    simOps.put(SimAction.APPLY_SUGGESTIONS, ApplySuggestions.class);
+    simOps.put(SimAction.KILL_NODES, KillNodes.class);
+    simOps.put(SimAction.ADD_NODES, AddNodes.class);
+    simOps.put(SimAction.LOOP_START, LoopOp.class);
+    simOps.put(SimAction.LOOP_END, null);
+    simOps.put(SimAction.SET_OP_DELAYS, SetOpDelays.class);
+    simOps.put(SimAction.SOLR_REQUEST, RunSolrRequest.class);
+    simOps.put(SimAction.RUN, RunSimulator.class);
+    simOps.put(SimAction.WAIT_COLLECTION, WaitCollection.class);
+    simOps.put(SimAction.EVENT_LISTENER, SetEventListener.class);
+    simOps.put(SimAction.WAIT_EVENT, WaitEvent.class);
+    simOps.put(SimAction.CTX_SET, CtxSet.class);
+    simOps.put(SimAction.CTX_REMOVE, CtxRemove.class);
+    simOps.put(SimAction.DUMP, Dump.class);
+    simOps.put(SimAction.SET_NODE_METRICS, SetNodeMetrics.class);
+    simOps.put(SimAction.SET_SHARD_METRICS, SetShardMetrics.class);
+    simOps.put(SimAction.INDEX_DOCS, IndexDocs.class);
+    simOps.put(SimAction.ASSERT, Assert.class);
+  }
+
+  /**
+   * Loop action.
+   */
+  public static class LoopOp extends SimOp {
+    // populated by the DSL parser
+    List<SimOp> ops = new ArrayList<>();
+    int iterations;
+
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      iterations = Integer.parseInt(params.get("iterations", "10"));
+      for (int i = 0; i < iterations; i++) {
+        if (scenario.abortLoop) {
+          log.info("        -- abortLoop requested, aborting after " + i + " iterations.");
+          return;
+        }
+        scenario.context.put(LOOP_ITER_PROP, i);
+        log.info("   * iter " + (i + 1) + ":");
+        for (SimOp op : ops) {
+          op.prepareCurrentParams(scenario);
+          log.info("     - " + op.getClass().getSimpleName() + "\t" + op.params.toString());
+          op.execute(scenario);
+          if (scenario.abortLoop) {
+            log.info("        -- abortLoop requested, aborting after " + i + " iterations.");
+            return;
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * Set a context property.
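+   * Requires 'key' and one or more 'value' params, e.g. (a hypothetical DSL line, not
+   * from this patch) {@code ctx_set key=myKey&value=someValue}; multiple 'value'
+   * params are stored as a list.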
+   */
+  public static class CtxSet extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      String key = params.required().get("key");
+      String[] values = params.required().getParams("value");
+      if (values != null) {
+        scenario.context.put(key, Arrays.asList(values));
+      } else {
+        scenario.context.remove(key);
+      }
+    }
+  }
+
+  /**
+   * Remove a context property.
+   */
+  public static class CtxRemove extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      String key = params.required().get("key");
+      scenario.context.remove(key);
+    }
+  }
+
+  /**
+   * Create a simulated cluster.
+   */
+  public static class CreateCluster extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      int numNodes = Integer.parseInt(params.get("numNodes", "5"));
+      boolean disableMetricsHistory = Boolean.parseBoolean(params.get("disableMetricsHistory", "false"));
+      String timeSourceStr = params.get("timeSource", "simTime:50");
+      if (scenario.cluster != null) { // close & reset
+        IOUtils.closeQuietly(scenario.cluster);
+        scenario.context.clear();
+      }
+      scenario.cluster = SimCloudManager.createCluster(numNodes, TimeSource.get(timeSourceStr));
+      if (disableMetricsHistory) {
+        scenario.cluster.disableMetricsHistory();
+      }
+      scenario.config = scenario.cluster.getDistribStateManager().getAutoScalingConfig();
+    }
+  }
+
+  /**
+   * Create a simulated cluster from an autoscaling snapshot.
+   */
+  public static class LoadSnapshot extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      String path = params.get("path");
+      SnapshotCloudManager snapshotCloudManager;
+      if (path == null) {
+        String zkHost = params.get("zkHost");
+        if (zkHost == null) {
+          throw new IOException(SimAction.LOAD_SNAPSHOT + " must specify 'path' or 'zkHost'");
+        } else {
+          try (CloudSolrClient cloudSolrClient = new CloudSolrClient.Builder(Collections.singletonList(zkHost), Optional.empty()).build()) {
+            cloudSolrClient.connect();
+            try (SolrClientCloudManager realCloudManager = new SolrClientCloudManager(NoopDistributedQueueFactory.INSTANCE, cloudSolrClient)) {
+              snapshotCloudManager = new SnapshotCloudManager(realCloudManager, null);
+            }
+          }
+        }
+      } else {
+        snapshotCloudManager = SnapshotCloudManager.readSnapshot(new File(path));
+      }
+      scenario.cluster = SimCloudManager.createCluster(snapshotCloudManager, null, snapshotCloudManager.getTimeSource());
+      scenario.config = scenario.cluster.getDistribStateManager().getAutoScalingConfig();
+    }
+  }
+
+  /**
+   * Save an autoscaling snapshot.
+   */
+  public static class SaveSnapshot extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      String path = params.get("path");
+      if (path == null) {
+        throw new IOException(SimAction.SAVE_SNAPSHOT + " must specify 'path'");
+      }
+      boolean redact = Boolean.parseBoolean(params.get("redact", "false"));
+      SnapshotCloudManager snapshotCloudManager = new SnapshotCloudManager(scenario.cluster, null);
+      snapshotCloudManager.saveSnapshot(new File(path), true, redact);
+    }
+  }
+
+  /**
+   * Load autoscaling.json configuration.
+   */
+  public static class LoadAutoscaling extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      Map<String, Object> map;
+      boolean addDefaults = Boolean.parseBoolean(params.get("withDefaultTriggers", "true"));
+      int defaultWaitFor = Integer.parseInt(params.get("defaultWaitFor", "120"));
+      String path = params.get("path");
+      if (path == null) {
+        String json = params.get("json");
+        if (json == null) {
+          throw new IOException(SimAction.LOAD_AUTOSCALING + " must specify either 'path' or 'json'");
+        } else {
+          map = (Map<String, Object>) Utils.fromJSONString(json);
+        }
+      } else {
+        File f = new File(path);
+        Reader r;
+        if (f.exists()) {
+          r = new InputStreamReader(new FileInputStream(f), Charset.forName("UTF-8"));
+        } else {
+          InputStream is = getClass().getResourceAsStream(path);
+          if (is == null) {
+            throw new IOException("path " + path + " does not exist and it's not a resource");
+          }
+          r = new InputStreamReader(is, Charset.forName("UTF-8"));
+        }
+        map = (Map<String, Object>) Utils.fromJSON(r);
+      }
+      AutoScalingConfig config = new AutoScalingConfig(map);
+      if (addDefaults) {
+        // add default triggers
+        if (!config.getTriggerConfigs().containsKey(AutoScaling.AUTO_ADD_REPLICAS_TRIGGER_NAME)) {
+          Map<String, Object> props = new HashMap<>(AutoScaling.AUTO_ADD_REPLICAS_TRIGGER_PROPS);
+          props.put("waitFor", defaultWaitFor);
+          AutoScalingConfig.TriggerConfig trigger = new AutoScalingConfig.TriggerConfig(AutoScaling.AUTO_ADD_REPLICAS_TRIGGER_NAME, props);
+          config = config.withTriggerConfig(trigger);
+          config = AutoScalingHandler.withSystemLogListener(config, AutoScaling.AUTO_ADD_REPLICAS_TRIGGER_NAME);
+        }
+        if (!config.getTriggerConfigs().containsKey(AutoScaling.SCHEDULED_MAINTENANCE_TRIGGER_NAME)) {
+          AutoScalingConfig.TriggerConfig trigger = new AutoScalingConfig.TriggerConfig(AutoScaling.SCHEDULED_MAINTENANCE_TRIGGER_NAME, AutoScaling.SCHEDULED_MAINTENANCE_TRIGGER_PROPS);
+          config = config.withTriggerConfig(trigger);
+          config = AutoScalingHandler.withSystemLogListener(config, AutoScaling.SCHEDULED_MAINTENANCE_TRIGGER_NAME);
+        }
+      }
+      scenario.config = config;
+      // set this config on the simulator
+      scenario.cluster.getSimDistribStateManager().simSetAutoScalingConfig(config);
+      // wait until it finished processing the config
+      (new TimeOut(30, TimeUnit.SECONDS, scenario.cluster.getTimeSource()))
+          .waitFor("OverseerTriggerThread never caught up to the latest znodeVersion", () -> {
+            try {
+              AutoScalingConfig autoscalingConfig = scenario.cluster.getDistribStateManager().getAutoScalingConfig();
+              return autoscalingConfig.getZkVersion() == scenario.cluster.getOverseerTriggerThread().getProcessedZnodeVersion();
+            } catch (Exception e) {
+              throw new RuntimeException("FAILED", e);
+            }
+          });
+
+    }
+  }
+
+  /**
+   * Kill one or more nodes.
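+   * Takes either a 'numNodes' param (kill that many randomly selected nodes) or
+   * 'nodes' / 'node' params naming specific nodes ('nodes' accepts comma-separated values).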
+   */
+  public static class KillNodes extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      if (params.get("numNodes") != null) {
+        int numNodes = Integer.parseInt(params.get("numNodes"));
+        scenario.cluster.simRemoveRandomNodes(numNodes, false, scenario.cluster.getRandom());
+      } else if (params.get("nodes") != null || params.get("node") != null) {
+        Set<String> nodes = new HashSet<>();
+        String[] nodesValues = params.getParams("nodes");
+        if (nodesValues != null) {
+          for (String nodesValue : nodesValues) {
+            String[] vals = nodesValue.split(",");
+            nodes.addAll(Arrays.asList(vals));
+          }
+        }
+        nodesValues = params.getParams("node");
+        if (nodesValues != null) {
+          nodes.addAll(Arrays.asList(nodesValues));
+        }
+        for (String node : nodes) {
+          scenario.cluster.simRemoveNode(node, false);
+        }
+      }
+    }
+  }
+
+  /**
+   * Add one or more nodes.
+   */
+  public static class AddNodes extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      int numNodes = Integer.parseInt(params.get("numNodes"));
+      for (int i = 0; i < numNodes; i++) {
+        scenario.cluster.simAddNode();
+      }
+    }
+  }
+
+  /**
+   * Calculate autoscaling suggestions.
+   */
+  public static class CalculateSuggestions extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      List<Suggester.SuggestionInfo> suggestions = PolicyHelper.getSuggestions(scenario.config, scenario.cluster);
+      scenario.context.put(SUGGESTIONS_CTX_PROP, suggestions);
+      log.info("        - " + suggestions.size() + " suggestions");
+      if (suggestions.isEmpty()) {
+        scenario.abortLoop = true;
+      }
+    }
+  }
+
+  /**
+   * Apply autoscaling suggestions.
+   */
+  public static class ApplySuggestions extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      List<Suggester.SuggestionInfo> suggestions = (List<Suggester.SuggestionInfo>) scenario.context.getOrDefault(SUGGESTIONS_CTX_PROP, Collections.emptyList());
+      int unresolvedCount = 0;
+      for (Suggester.SuggestionInfo suggestion : suggestions) {
+        SolrRequest operation = suggestion.getOperation();
+        if (operation == null) {
+          unresolvedCount++;
+          if (suggestion.getViolation() == null) {
+            log.error("       -- ignoring suggestion without violation and without operation: " + suggestion);
+          }
+          continue;
+        }
+        SolrParams params = operation.getParams();
+        if (operation instanceof V2Request) {
+          params = SimUtils.v2AdminRequestToV1Params((V2Request)operation);
+        }
+        Map<String, Object> paramsMap = new LinkedHashMap<>();
+        params.toMap(paramsMap);
+        ReplicaInfo info = scenario.cluster.getSimClusterStateProvider().simGetReplicaInfo(
+            params.get(CollectionAdminParams.COLLECTION), params.get("replica"));
+        if (info == null) {
+          log.error("Could not find ReplicaInfo for params: " + params);
+        } else if (scenario.verbose) {
+          paramsMap.put("replicaInfo", info);
+        } else if (info.getVariable(Variable.Type.CORE_IDX.tagName) != null) {
+          paramsMap.put(Variable.Type.CORE_IDX.tagName, info.getVariable(Variable.Type.CORE_IDX.tagName));
+        }
+        try {
+          scenario.cluster.request(operation);
+        } catch (Exception e) {
+          log.error("Aborting - error executing suggestion " + suggestion, e);
+          break;
+        }
+      }
+      if (suggestions.size() > 0 && unresolvedCount == suggestions.size()) {
+        log.info("        -- aborting simulation, only " + unresolvedCount + " unresolved violations remain");
+        scenario.abortLoop = true;
+      }
+    }
+  }
+
+  /**
+   * Execute a SolrRequest supported by {@link SimCloudManager}.
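+   * For example (a hypothetical DSL line, not from this patch):
+   * {@code solr_request /admin/collections?action=LIST}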
+   */
+  public static class RunSolrRequest extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      String path = params.get("path", "/");
+      SolrRequest.METHOD m = SolrRequest.METHOD.valueOf(params.get("httpMethod", "GET"));
+      params.remove("httpMethod");
+      String streamBody = params.get("stream.body");
+      params.remove("stream.body");
+      GenericSolrRequest req = new GenericSolrRequest(m, path, params);
+      if (streamBody != null) {
+        req.setContentWriter(new RequestWriter.StringPayloadContentWriter(streamBody, "application/json"));
+      }
+      SolrResponse rsp = scenario.cluster.request(req);
+      List<SolrResponse> responses = (List<SolrResponse>) scenario.context.computeIfAbsent(RESPONSES_CTX_PROP, Utils.NEW_ARRAYLIST_FUN);
+      responses.add(rsp);
+    }
+  }
+
+  /**
+   * Set delays for specified collection operations in order to simulate slow execution.
+   */
+  public static class SetOpDelays extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      String[] collections = params.remove("collection");
+      if (collections == null || collections.length == 0) {
+        throw new IOException("'collection' param is required but missing: " + params);
+      }
+      Map<String, Long> delays = new HashMap<>();
+      params.forEach(e -> {
+        String key = e.getKey();
+        CollectionParams.CollectionAction a = CollectionParams.CollectionAction.get(key);
+        if (a == null) {
+          log.warn("Invalid collection action " + key + ", skipping...");
+          return;
+        }
+        String[] values = e.getValue();
+        if (values == null || values[0].isBlank()) {
+          delays.put(a.name(), null);
+        } else {
+          Long value = Long.parseLong(values[0]);
+          delays.put(a.name(), value);
+        }
+      });
+      for (String collection : collections) {
+        scenario.cluster.getSimClusterStateProvider().simSetOpDelays(collection, delays);
+      }
+    }
+  }
+
+  /**
+   * Run the simulator for a while.
+   */
+  public static class RunSimulator extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      int timeMs = Integer.parseInt(params.get("time", "60000"));
+      scenario.cluster.getTimeSource().sleep(timeMs);
+    }
+  }
+
+  /**
+   * Wait for a specific collection shape.
+   */
+  public static class WaitCollection extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      String collection = params.required().get("collection");
+      int shards = Integer.parseInt(params.required().get("shards"));
+      int replicas = Integer.parseInt(params.required().get("replicas"));
+      boolean withInactive = Boolean.parseBoolean(params.get("withInactive", "false"));
+      boolean requireLeaders = Boolean.parseBoolean(params.get("requireLeaders", "true"));
+      int waitSec = Integer.parseInt(params.required().get("wait", "" + CloudUtil.DEFAULT_TIMEOUT));
+      CloudUtil.waitForState(scenario.cluster, collection, waitSec, TimeUnit.SECONDS,
+          CloudUtil.clusterShape(shards, replicas, withInactive, requireLeaders));
+    }
+  }
+
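+  /**
+   * Trigger listener installed by EVENT_LISTENER: when the configured trigger fires
+   * it records the event and releases the latch that WAIT_EVENT blocks on.
+   */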
+  private static class SimWaitListener extends TriggerListenerBase {
+    private final TimeSource timeSource;
+    private final AutoScalingConfig.TriggerListenerConfig config;
+    private CountDownLatch triggerFired = new CountDownLatch(1);
+    private TriggerEvent event;
+
+    SimWaitListener(TimeSource timeSource, AutoScalingConfig.TriggerListenerConfig config) {
+      this.timeSource = timeSource;
+      this.config = config;
+    }
+
+    @Override
+    public AutoScalingConfig.TriggerListenerConfig getConfig() {
+      return config;
+    }
+
+    @Override
+    public boolean isEnabled() {
+      return true;
+    }
+
+    @Override
+    public void onEvent(TriggerEvent event, TriggerEventProcessorStage stage, String actionName, ActionContext context, Throwable error, String message) throws Exception {
+      triggerFired.countDown();
+      this.event = event;
+    }
+
+    public TriggerEvent getEvent() {
+      return event;
+    }
+
+    public void wait(int waitSec) throws Exception {
+      long waitTime = timeSource.convertDelay(TimeUnit.SECONDS, waitSec, TimeUnit.MILLISECONDS);
+      boolean await =  triggerFired.await(waitTime, TimeUnit.MILLISECONDS);
+      if (!await) {
+        throw new IOException("Timed out waiting for trigger " + config.trigger + " to fire after simulated " +
+            waitSec + "s (real " + waitTime + "ms).");
+      }
+    }
+  }
+
+  /**
+   * Set a temporary listener that waits for a specific trigger event to be processed.
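+   * Requires a 'trigger' param and one or more 'stage' params (optionally
+   * 'beforeAction' / 'afterAction'). For example (hypothetical DSL lines, not from
+   * this patch):
+   * <pre>
+   *   event_listener trigger=node_lost_trigger&amp;stage=SUCCEEDED
+   *   wait_event trigger=node_lost_trigger&amp;wait=60
+   * </pre>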
+   */
+  public static class SetEventListener extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      String trigger = params.required().get(AutoScalingParams.TRIGGER);
+      Map<String, Object> cfgMap = new HashMap<>();
+      String name = ".sim_wait_event_" + trigger;
+      cfgMap.put(AutoScalingParams.NAME, name);
+      cfgMap.put(AutoScalingParams.TRIGGER, trigger);
+
+      String[] beforeActions = params.getParams(AutoScalingParams.BEFORE_ACTION);
+      String[] afterActions = params.getParams(AutoScalingParams.AFTER_ACTION);
+      if (beforeActions != null) {
+        for (String beforeAction : beforeActions) {
+          ((List<String>)cfgMap.computeIfAbsent(AutoScalingParams.BEFORE_ACTION, Utils.NEW_ARRAYLIST_FUN)).add(beforeAction);
+        }
+      }
+      if (afterActions != null) {
+        for (String afterAction : afterActions) {
+          ((List<String>)cfgMap.computeIfAbsent(AutoScalingParams.AFTER_ACTION, Utils.NEW_ARRAYLIST_FUN)).add(afterAction);
+        }
+      }
+      String[] stages = params.required().getParams(AutoScalingParams.STAGE);
+      for (String stage : stages) {
+        String[] lst = stage.split("[,\\s]+");
+        for (String val : lst) {
+          try {
+            TriggerEventProcessorStage.valueOf(val);
+            ((List<String>)cfgMap.computeIfAbsent(AutoScalingParams.STAGE, Utils.NEW_ARRAYLIST_FUN)).add(val);
+          } catch (IllegalArgumentException e) {
+            throw new IOException("Invalid stage name '" + val + "'");
+          }
+        }
+      }
+      final AutoScalingConfig.TriggerListenerConfig listenerConfig = new AutoScalingConfig.TriggerListenerConfig(name, cfgMap);
+      TriggerListener listener = new SimWaitListener(scenario.cluster.getTimeSource(), listenerConfig);
+      if (scenario.context.containsKey("_sim_waitListener_" + trigger)) {
+        throw new IOException("currently only one listener can be set per trigger. Trigger name: " + trigger);
+      }
+      scenario.context.put("_sim_waitListener_" + trigger, listener);
+      scenario.cluster.getOverseerTriggerThread().getScheduledTriggers().addAdditionalListener(listener);
+    }
+  }
+
+  /**
+   * Wait for the previously set listener to capture an event.
+   */
+  public static class WaitEvent extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      String trigger = params.required().get(AutoScalingParams.TRIGGER);
+      int waitSec = Integer.parseInt(params.get("wait", "" + CloudUtil.DEFAULT_TIMEOUT));
+      SimWaitListener listener = (SimWaitListener)scenario.context.remove("_sim_waitListener_" + trigger);
+      if (listener == null) {
+        throw new IOException(SimAction.WAIT_EVENT + " must be preceded by " + SimAction.EVENT_LISTENER + " for trigger " + trigger);
+      }
+      try {
+        listener.wait(waitSec);
+        scenario.context.remove(TRIGGER_EVENT_PREFIX + trigger);
+        if (listener.getEvent() != null) {
+          Map<String, Object> ev = listener.getEvent().toMap(new LinkedHashMap<>());
+          scenario.context.put(TRIGGER_EVENT_PREFIX + trigger, ev);
+        }
+      } finally {
+        scenario.cluster.getOverseerTriggerThread().getScheduledTriggers().removeAdditionalListener(listener);
+      }
+    }
+  }
+
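+  /**
+   * Set node metric values on a set of nodes. The required {@link Clause#NODESET}
+   * param selects the nodes ({@link Policy#ANY} selects all live nodes; otherwise a
+   * comma- or whitespace-separated list); all remaining params are set as node values.
+   */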
+  public static class SetNodeMetrics extends SimOp {
+
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      String nodeset = params.required().get(Clause.NODESET);
+      Set<String> nodes = new HashSet<>();
+      if (nodeset.equals(Policy.ANY)) {
+        nodes.addAll(scenario.cluster.getLiveNodesSet().get());
+      } else {
+        String[] list = nodeset.split("[,\\s]+");
+        for (String node : list) {
+          if (node.isBlank()) {
+            continue;
+          }
+          nodes.add(node);
+        }
+      }
+      Map<String, Object> values = new HashMap<>();
+      params.remove(Clause.NODESET);
+      for (String key : params.getParameterNames()) {
+        values.put(key, params.get(key));
+      }
+      for (String node : nodes) {
+        scenario.cluster.getSimNodeStateProvider().simSetNodeValues(node, values);
+      }
+    }
+  }
+
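+  /**
+   * Set metric values on the replicas of a collection's shard(s). Requires a
+   * 'collection' param; 'shard' is optional. Remaining params are treated as metric
+   * values (parsed as numbers when possible); the boolean 'delta' and 'divide' params
+   * control how numeric values are applied.
+   */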
+  public static class SetShardMetrics extends SimOp {
+
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      String collection = params.required().get("collection");
+      String shard = params.get("shard");
+      boolean delta = params.getBool("delta", false);
+      boolean divide = params.getBool("divide", false);
+      params.remove("collection");
+      params.remove("shard");
+      params.remove("delta");
+      params.remove("divide");
+      Map<String, Object> values = new HashMap<>();
+      for (String key : params.getParameterNames()) {
+        // try guessing if it's a number
+        try {
+          Double d = Double.valueOf(params.get(key));
+          values.put(key, d);
+        } catch (NumberFormatException nfe) {
+          // not a number
+          values.put(key, params.get(key));
+        }
+      }
+      values.forEach((k, v) -> {
+        try {
+          scenario.cluster.getSimClusterStateProvider().simSetShardValue(collection, shard, k, v, delta, divide);
+        } catch (Exception e) {
+          throw new RuntimeException("Error setting shard value", e);
+        }
+      });
+    }
+  }
+
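+  /**
+   * Bulk-index a number of simulated documents. Requires 'collection' and 'numDocs'
+   * params; the optional 'start' param (default 0) sets the starting offset for the
+   * generated document ids.
+   */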
+  public static class IndexDocs extends SimOp {
+
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      String collection = params.required().get("collection");
+      long numDocs = params.required().getLong("numDocs");
+      long start = params.getLong("start", 0L);
+
+      UpdateRequest ureq = new UpdateRequest();
+      ureq.setParam("collection", collection);
+      ureq.setDocIterator(new FakeDocIterator(start, numDocs));
+      scenario.cluster.simGetSolrClient().request(ureq);
+    }
+  }
+
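+  /** Conditions supported by {@link Assert}. */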
+  public enum Condition {
+    EQUALS,
+    NOT_EQUALS,
+    NULL,
+    NOT_NULL;
+
+    public static Condition get(String p) {
+      if (p == null) {
+        return null;
+      } else {
+        try {
+          return Condition.valueOf(p.toUpperCase(Locale.ROOT));
+        } catch (Exception e) {
+          return null;
+        }
+      }
+    }
+  }
+
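+  /**
+   * Assert a condition on a context variable (selected by 'key', optionally a
+   * slash-delimited path into the context) or on a literal 'value' param.
+   * For example (a hypothetical DSL line, not from this patch):
+   * <pre>assert condition=not_null&amp;key=_random_node_</pre>
+   */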
+  public static class Assert extends SimOp {
+
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      String key = params.get("key");
+      Condition condition = Condition.get(params.required().get("condition"));
+      if (condition == null) {
+        throw new IOException("Invalid 'condition' in params: " + params);
+      }
+      String expected = params.get("expected");
+      if (condition != Condition.NOT_NULL && condition != Condition.NULL && expected == null) {
+        throw new IOException("'expected' param is required when condition is " + condition);
+      }
+      Object value;
+      if (key != null) {
+        if (key.contains("/")) {
+          value = Utils.getObjectByPath(scenario.context, true, key);
+        } else {
+          value = scenario.context.get(key);
+        }
+      } else {
+        value = params.required().get("value");
+      }
+      switch (condition) {
+        case NULL:
+          if (value != null) {
+            throw new IOException("expected value should be null but was '" + value + "'");
+          }
+          break;
+        case NOT_NULL:
+          if (value == null) {
+            throw new IOException("expected value should not be null");
+          }
+          break;
+        case EQUALS:
+          if (!expected.equals(String.valueOf(value))) {
+            throw new IOException("expected value is '" + expected + "' but actual value is '" + value + "'");
+          }
+          break;
+        case NOT_EQUALS:
+          if (expected.equals(String.valueOf(value))) {
+            throw new IOException("expected value is '" + expected + "' and actual value is the same while it should be different");
+          }
+          break;
+      }
+    }
+  }
+
+
+  /**
+   * Dump the simulator state to the console.
+   */
+  public static class Dump extends SimOp {
+    @Override
+    public void execute(SimScenario scenario) throws Exception {
+      boolean redact = Boolean.parseBoolean(params.get("redact", "false"));
+      boolean withData = Boolean.parseBoolean(params.get("withData", "false"));
+      boolean withStats = Boolean.parseBoolean(params.get("withStats", "false"));
+      boolean withSuggestions = Boolean.parseBoolean(params.get("withSuggestions", "true"));
+      boolean withDiagnostics = Boolean.parseBoolean(params.get("withDiagnostics", "false"));
+      boolean withNodeState = Boolean.parseBoolean(params.get("withNodeState", "false"));
+      boolean withClusterState = Boolean.parseBoolean(params.get("withClusterState", "false"));
+      boolean withManagerState = Boolean.parseBoolean(params.get("withManagerState", "false"));
+      SnapshotCloudManager snapshotCloudManager = new SnapshotCloudManager(scenario.cluster, null);
+      Map<String, Object> snapshot = snapshotCloudManager.getSnapshot(true, redact);
+      if (!withData) {
+        snapshot.remove(SnapshotCloudManager.DISTRIB_STATE_KEY);
+      }
+      if (!withNodeState) {
+        snapshot.remove(SnapshotCloudManager.NODE_STATE_KEY);
+      }
+      if (!withClusterState) {
+        snapshot.remove(SnapshotCloudManager.CLUSTER_STATE_KEY);
+      }
+      if (!withStats) {
+        snapshot.remove(SnapshotCloudManager.STATISTICS_STATE_KEY);
+      }
+      if (!withManagerState) {
+        snapshot.remove(SnapshotCloudManager.MANAGER_STATE_KEY);
+      }
+      if (!withDiagnostics) {
+        ((Map<String, Object>)snapshot.get(SnapshotCloudManager.AUTOSCALING_STATE_KEY)).remove("diagnostics");
+      }
+      if (!withSuggestions) {
+        ((Map<String, Object>)snapshot.get(SnapshotCloudManager.AUTOSCALING_STATE_KEY)).remove("suggestions");
+      }
+      String data = Utils.toJSONString(snapshot);
+      if (redact) {
+        RedactionUtils.RedactionContext ctx = SimUtils.getRedactionContext(snapshotCloudManager.getClusterStateProvider().getClusterState());
+        data = RedactionUtils.redactNames(ctx.getRedactions(), data);
+      }
+      scenario.console.println(data);
+    }
+  }
+
+  /**
+   * Parse a DSL string and create a scenario ready to run.
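+   * <p>A hypothetical scenario sketch (not from this patch) illustrating the syntax:</p>
+   * <pre>
+   *   create_cluster numNodes=5
+   *   load_autoscaling json={'cluster-policy':[]}&amp;defaultWaitFor=10
+   *   loop_start iterations=3
+   *     add_nodes numNodes=1
+   *     run time=60000
+   *     calculate_suggestions
+   *     apply_suggestions
+   *   loop_end
+   * </pre>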
+   * @param data DSL string with commands and parameters
+   * @return configured scenario
+   * @throws Exception on syntax errors
+   */
+  public static SimScenario load(String data) throws Exception {
+    SimScenario scenario = new SimScenario();
+    String[] lines = data.split("\\r?\\n");
+    for (int i = 0; i < lines.length; i++) {
+      String line = lines[i];
+      line = line.trim();
+      if (line.isBlank() || line.startsWith("#") || line.startsWith("//")) {
+        continue;
+      }
+      // remove trailing // comments
+      String[] comments = line.split("//");
+      String expr = comments[0];
+      // split on whitespace
+      String[] parts = expr.split("\\s+");
+      if (parts.length > 2) {
+        log.warn("Invalid line - wrong number of parts " + parts.length + ", skipping: " + line);
+        continue;
+      }
+      SimAction action = SimAction.get(parts[0]);
+      if (action == null) {
+        log.warn("Invalid scenario action " + parts[0] + ", skipping...");
+        continue;
+      }
+      if (action == SimAction.LOOP_END) {
+        if (!scenario.context.containsKey("loop")) {
+          throw new IOException("LOOP_END without start!");
+        }
+        scenario.context.remove("loop");
+        continue;
+      }
+      Class<? extends SimOp> opClass = simOps.get(action);
+      SimOp op = opClass.getConstructor().newInstance();
+      ModifiableSolrParams params = new ModifiableSolrParams();
+      if (parts.length > 1) {
+        String paramsString = parts[1];
+        if (parts[1].contains("?")) { // url-like with path?params...
+          String[] urlParts = parts[1].split("\\?");
+          params.set("path", urlParts[0]);
+          paramsString = urlParts.length > 1 ? urlParts[1] : "";
+        }
+        String[] paramsParts = paramsString.split("&");
+        for (String paramPair : paramsParts) {
+          String[] paramKV = paramPair.split("=");
+          String k = URLDecoder.decode(paramKV[0], "UTF-8");
+          String v = paramKV.length > 1 ? URLDecoder.decode(paramKV[1], "UTF-8") : null;
+          params.add(k, v);
+        }
+      }
+      op.init(params);
+      // loop handling
+      if (action == SimAction.LOOP_START) {
+        if (scenario.context.containsKey("loop")) {
+          throw new IOException("only one loop level is allowed");
+        }
+        scenario.context.put("loop", op);
+        scenario.ops.add(op);
+        continue;
+      }
+      LoopOp currentLoop = (LoopOp) scenario.context.get("loop");
+      if (currentLoop != null) {
+        currentLoop.ops.add(op);
+      } else {
+        scenario.ops.add(op);
+      }
+    }
+    if (scenario.context.containsKey("loop")) {
+      throw new IOException("Unterminated loop statement");
+    }
+    // sanity check set_listener / wait_listener
+    int numSets = 0, numWaits = 0;
+    for (SimOp op : scenario.ops) {
+      if (op instanceof SetEventListener) {
+        numSets++;
+      } else if (op instanceof WaitEvent) {
+        numWaits++;
+      }
+      if (numWaits > numSets) {
+        throw new Exception("Unexpected " + SimAction.WAIT_EVENT + " without previous " + SimAction.EVENT_LISTENER);
+      }
+    }
+    if (numSets > numWaits) {
+      throw new Exception(SimAction.EVENT_LISTENER + " count (" + numSets + ") should be equal to " +
+          SimAction.WAIT_EVENT + " count (" + numWaits + ")");
+    }
+    return scenario;
+  }
+
+  /**
+   * Run the scenario.
+   */
+  public void run() throws Exception {
+    for (int i = 0; i < ops.size(); i++) {
+      if (abortScenario) {
+        log.info("-- abortScenario requested, aborting after " + i + " ops.");
+        return;
+      }
+      SimOp op = ops.get(i);
+      log.info((i + 1) + ".\t" + op.getClass().getSimpleName() + "\t" + op.initParams.toString());
+      // substitute parameters based on the current context
+      if (cluster != null && cluster.getLiveNodesSet().size() > 0) {
+        context.put(LIVE_NODES_CTX_PROP, new ArrayList<>(cluster.getLiveNodesSet().get()));
+        context.put(RANDOM_NODE_CTX_PROP, cluster.getSimClusterStateProvider().simGetRandomNode());
+        context.put(COLLECTIONS_CTX_PROP, cluster.getSimClusterStateProvider().simListCollections());
+        context.put(OVERSEER_LEADER_CTX_PROP, cluster.getSimClusterStateProvider().simGetOverseerLeader());
+      } else {
+        context.remove(LIVE_NODES_CTX_PROP);
+        context.remove(COLLECTIONS_CTX_PROP);
+        context.remove(RANDOM_NODE_CTX_PROP);
+        context.remove(SUGGESTIONS_CTX_PROP);
+        context.remove(OVERSEER_LEADER_CTX_PROP);
+      }
+      op.prepareCurrentParams(this);
+      log.info("\t\t" + op.getClass().getSimpleName() + "\t" + op.params.toString());
+      op.execute(this);
+    }
+  }
+
+  @Override
+  public void close() throws Exception {
+    if (cluster != null) {
+      cluster.close();
+      cluster = null;
+    }
+  }
+}
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java
index 1c5d606..b7d59de 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java
@@ -134,17 +134,20 @@ public class SimUtils {
     }
     allReplicaInfos.keySet().forEach(collection -> {
       Set<String> infosCores = allReplicaInfos.getOrDefault(collection, Collections.emptyMap()).keySet();
-      Set<String> csCores = allReplicas.getOrDefault(collection, Collections.emptyMap()).keySet();
+      Map<String, Replica> replicas = allReplicas.getOrDefault(collection, Collections.emptyMap());
+      Set<String> csCores = replicas.keySet();
       if (!infosCores.equals(csCores)) {
         Set<String> notInClusterState = infosCores.stream()
             .filter(k -> !csCores.contains(k))
             .collect(Collectors.toSet());
         Set<String> notInNodeProvider = csCores.stream()
-            .filter(k -> !infosCores.contains(k))
+            .filter(k -> !infosCores.contains(k) && replicas.get(k).isActive(solrCloudManager.getClusterStateProvider().getLiveNodes()))
             .collect(Collectors.toSet());
-        throw new RuntimeException("Mismatched replica data between ClusterState and NodeStateProvider:\n\t" +
-            "replica not in ClusterState: " + notInClusterState + "\n\t" +
-            "replica not in NodeStateProvider: " + notInNodeProvider);
+        if (!notInClusterState.isEmpty() || !notInNodeProvider.isEmpty()) {
+          throw new RuntimeException("Mismatched replica data for collection " + collection + " between ClusterState and NodeStateProvider:\n\t" +
+              "replica in NodeStateProvider but not in ClusterState: " + notInClusterState + "\n\t" +
+              "replica in ClusterState but not in NodeStateProvider: " + notInNodeProvider);
+        }
       }
     });
     // verify all replicas have size info
@@ -349,6 +352,9 @@ public class SimUtils {
     }
     String a = cmd.keySet().iterator().next();
     ModifiableSolrParams params = new ModifiableSolrParams();
+    if (req.getParams() != null) {
+      params.add(req.getParams());
+    }
     params.add(CollectionAdminParams.COLLECTION, path.substring(3));
     if (req.getParams() != null) {
       params.add(req.getParams());
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index c32783c..1d562c3 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -100,6 +100,7 @@ import org.apache.solr.logging.MDCLoggingContext;
 import org.apache.solr.metrics.SolrCoreMetricManager;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.pkg.PackageLoader;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.request.SolrRequestInfo;
@@ -211,7 +212,9 @@ public class CoreContainer {
 
   protected volatile SolrMetricManager metricManager;
 
-  protected volatile String metricTag = Integer.toHexString(hashCode());
+  protected volatile String metricTag = SolrMetricProducer.getUniqueMetricTag(this, null);
+
+  protected volatile SolrMetricsContext solrMetricsContext;
 
   protected MetricsHandler metricsHandler;
 
@@ -398,7 +401,7 @@ public class CoreContainer {
           getResourceLoader().newInstance(klas, AuditLoggerPlugin.class));
 
       newAuditloggerPlugin.plugin.init(auditConf);
-      newAuditloggerPlugin.plugin.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, "/auditlogging");
+      newAuditloggerPlugin.plugin.initializeMetrics(solrMetricsContext, "/auditlogging");
     } else {
       log.debug("Security conf doesn't exist. Skipping setup for audit logging module.");
     }
@@ -455,8 +458,7 @@ public class CoreContainer {
     if (authenticationPlugin != null) {
       authenticationPlugin.plugin.init(authenticationConfig);
       setupHttpClientForAuthPlugin(authenticationPlugin.plugin);
-      authenticationPlugin.plugin.initializeMetrics
-        (metricManager, SolrInfoBean.Group.node.toString(), metricTag, "/authentication");
+      authenticationPlugin.plugin.initializeMetrics(solrMetricsContext, "/authentication");
     }
     this.authenticationPlugin = authenticationPlugin;
     try {
@@ -618,6 +620,8 @@ public class CoreContainer {
     containerHandlers.getApiBag().register(new AnnotatedApi(packageStoreAPI.writeAPI), Collections.EMPTY_MAP);
 
     metricManager = new SolrMetricManager(loader, cfg.getMetricsConfig());
+    String registryName = SolrMetricManager.getRegistryName(SolrInfoBean.Group.node);
+    solrMetricsContext = new SolrMetricsContext(metricManager, registryName, metricTag);
 
     coreContainerWorkExecutor = MetricUtils.instrumentedExecutorService(
         coreContainerWorkExecutor, null,
@@ -627,11 +631,11 @@ public class CoreContainer {
     shardHandlerFactory = ShardHandlerFactory.newInstance(cfg.getShardHandlerFactoryPluginInfo(), loader);
     if (shardHandlerFactory instanceof SolrMetricProducer) {
       SolrMetricProducer metricProducer = (SolrMetricProducer) shardHandlerFactory;
-      metricProducer.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, "httpShardHandler");
+      metricProducer.initializeMetrics(solrMetricsContext, "httpShardHandler");
     }
 
     updateShardHandler = new UpdateShardHandler(cfg.getUpdateShardHandlerConfig());
-    updateShardHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, "updateShardHandler");
+    updateShardHandler.initializeMetrics(solrMetricsContext, "updateShardHandler");
 
     solrCores.load(loader);
 
@@ -644,7 +648,8 @@ public class CoreContainer {
     if (isZooKeeperAware()) {
       pkiAuthenticationPlugin = new PKIAuthenticationPlugin(this, zkSys.getZkController().getNodeName(),
           (PublicKeyHandler) containerHandlers.get(PublicKeyHandler.PATH));
-      pkiAuthenticationPlugin.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, "/authentication/pki");
+      // use deprecated API for back-compat, remove in 9.0
+      pkiAuthenticationPlugin.initializeMetrics(solrMetricsContext, "/authentication/pki");
       TracerConfigurator.loadTracer(loader, cfg.getTracerConfiguratorPluginInfo(), getZkController().getZkStateReader());
     }
 
@@ -664,7 +669,7 @@ public class CoreContainer {
     // metricsHistoryHandler uses metricsHandler, so create it first
     metricsHandler = new MetricsHandler(this);
     containerHandlers.put(METRICS_PATH, metricsHandler);
-    metricsHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, METRICS_PATH);
+    metricsHandler.initializeMetrics(solrMetricsContext, METRICS_PATH);
 
     createMetricsHistoryHandler();
 
@@ -674,7 +679,7 @@ public class CoreContainer {
     metricsCollectorHandler.init(null);
 
     containerHandlers.put(AUTHZ_PATH, securityConfHandler);
-    securityConfHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, AUTHZ_PATH);
+    securityConfHandler.initializeMetrics(solrMetricsContext, AUTHZ_PATH);
     containerHandlers.put(AUTHC_PATH, securityConfHandler);
 
 
@@ -689,22 +694,20 @@ public class CoreContainer {
 
     // initialize gauges for reporting the number of cores and disk total/free
 
-    String registryName = SolrMetricManager.getRegistryName(SolrInfoBean.Group.node);
-    String metricTag = Integer.toHexString(hashCode());
-    metricManager.registerGauge(null, registryName, () -> solrCores.getCores().size(),
-        metricTag, true, "loaded", SolrInfoBean.Category.CONTAINER.toString(), "cores");
-    metricManager.registerGauge(null, registryName, () -> solrCores.getLoadedCoreNames().size() - solrCores.getCores().size(),
-        metricTag, true, "lazy", SolrInfoBean.Category.CONTAINER.toString(), "cores");
-    metricManager.registerGauge(null, registryName, () -> solrCores.getAllCoreNames().size() - solrCores.getLoadedCoreNames().size(),
-        metricTag, true, "unloaded", SolrInfoBean.Category.CONTAINER.toString(), "cores");
+    solrMetricsContext.gauge(null, () -> solrCores.getCores().size(),
+        true, "loaded", SolrInfoBean.Category.CONTAINER.toString(), "cores");
+    solrMetricsContext.gauge(null, () -> solrCores.getLoadedCoreNames().size() - solrCores.getCores().size(),
+        true, "lazy", SolrInfoBean.Category.CONTAINER.toString(), "cores");
+    solrMetricsContext.gauge(null, () -> solrCores.getAllCoreNames().size() - solrCores.getLoadedCoreNames().size(),
+        true, "unloaded", SolrInfoBean.Category.CONTAINER.toString(), "cores");
     Path dataHome = cfg.getSolrDataHome() != null ? cfg.getSolrDataHome() : cfg.getCoreRootDirectory();
-    metricManager.registerGauge(null, registryName, () -> dataHome.toFile().getTotalSpace(),
-        metricTag, true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs");
-    metricManager.registerGauge(null, registryName, () -> dataHome.toFile().getUsableSpace(),
-        metricTag, true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs");
-    metricManager.registerGauge(null, registryName, () -> dataHome.toAbsolutePath().toString(),
-        metricTag, true, "path", SolrInfoBean.Category.CONTAINER.toString(), "fs");
-    metricManager.registerGauge(null, registryName, () -> {
+    solrMetricsContext.gauge(null, () -> dataHome.toFile().getTotalSpace(),
+        true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs");
+    solrMetricsContext.gauge(null, () -> dataHome.toFile().getUsableSpace(),
+        true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs");
+    solrMetricsContext.gauge(null, () -> dataHome.toAbsolutePath().toString(),
+        true, "path", SolrInfoBean.Category.CONTAINER.toString(), "fs");
+    solrMetricsContext.gauge(null, () -> {
           try {
             return org.apache.lucene.util.IOUtils.spins(dataHome.toAbsolutePath());
           } catch (IOException e) {
@@ -712,14 +715,14 @@ public class CoreContainer {
             return true;
           }
         },
-        metricTag, true, "spins", SolrInfoBean.Category.CONTAINER.toString(), "fs");
-    metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(),
-        metricTag, true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot");
-    metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(),
-        metricTag, true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot");
-    metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toAbsolutePath().toString(),
-        metricTag, true, "path", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot");
-    metricManager.registerGauge(null, registryName, () -> {
+        true, "spins", SolrInfoBean.Category.CONTAINER.toString(), "fs");
+    solrMetricsContext.gauge(null, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(),
+        true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot");
+    solrMetricsContext.gauge(null, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(),
+        true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot");
+    solrMetricsContext.gauge(null, () -> cfg.getCoreRootDirectory().toAbsolutePath().toString(),
+        true, "path", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot");
+    solrMetricsContext.gauge(null, () -> {
           try {
             return org.apache.lucene.util.IOUtils.spins(cfg.getCoreRootDirectory().toAbsolutePath());
           } catch (IOException e) {
@@ -727,24 +730,28 @@ public class CoreContainer {
             return true;
           }
         },
-        metricTag, true, "spins", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot");
+        true, "spins", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot");
     // add version information
-    metricManager.registerGauge(null, registryName, () -> this.getClass().getPackage().getSpecificationVersion(),
-        metricTag, true, "specification", SolrInfoBean.Category.CONTAINER.toString(), "version");
-    metricManager.registerGauge(null, registryName, () -> this.getClass().getPackage().getImplementationVersion(),
-        metricTag, true, "implementation", SolrInfoBean.Category.CONTAINER.toString(), "version");
+    solrMetricsContext.gauge(null, () -> this.getClass().getPackage().getSpecificationVersion(),
+        true, "specification", SolrInfoBean.Category.CONTAINER.toString(), "version");
+    solrMetricsContext.gauge(null, () -> this.getClass().getPackage().getImplementationVersion(),
+        true, "implementation", SolrInfoBean.Category.CONTAINER.toString(), "version");
 
     SolrFieldCacheBean fieldCacheBean = new SolrFieldCacheBean();
-    fieldCacheBean.initializeMetrics(metricManager, registryName, metricTag, null);
+    fieldCacheBean.initializeMetrics(solrMetricsContext, null);
 
     if (isZooKeeperAware()) {
       metricManager.loadClusterReporters(metricReporters, this);
+      packageLoader = new PackageLoader(this);
+      containerHandlers.getApiBag().register(new AnnotatedApi(packageLoader.getPackageAPI().editAPI), Collections.EMPTY_MAP);
+      containerHandlers.getApiBag().register(new AnnotatedApi(packageLoader.getPackageAPI().readAPI), Collections.EMPTY_MAP);
     }
     packageLoader = new PackageLoader(this);
     containerHandlers.getApiBag().register(new AnnotatedApi(packageLoader.getPackageAPI().editAPI), Collections.EMPTY_MAP);
     containerHandlers.getApiBag().register(new AnnotatedApi(packageLoader.getPackageAPI().readAPI), Collections.EMPTY_MAP);
 
 
+
     // setup executor to load cores in parallel
     ExecutorService coreLoadExecutor = MetricUtils.instrumentedExecutorService(
         ExecutorUtil.newMDCAwareFixedThreadPool(
@@ -828,7 +835,7 @@ public class CoreContainer {
       // initialize this handler here when SolrCloudManager is ready
       autoScalingHandler = new AutoScalingHandler(getZkController().getSolrCloudManager(), loader);
       containerHandlers.put(AutoScalingHandler.HANDLER_PATH, autoScalingHandler);
-      autoScalingHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, AutoScalingHandler.HANDLER_PATH);
+      autoScalingHandler.initializeMetrics(solrMetricsContext, AutoScalingHandler.HANDLER_PATH);
     }
     // This is a bit redundant but these are two distinct concepts for all they're accomplished at the same time.
     status |= LOAD_COMPLETE | INITIAL_CORE_LOAD_COMPLETE;
@@ -876,7 +883,7 @@ public class CoreContainer {
     metricsHistoryHandler = new MetricsHistoryHandler(name, metricsHandler,
         client, cloudManager, initArgs);
     containerHandlers.put(METRICS_HISTORY_PATH, metricsHistoryHandler);
-    metricsHistoryHandler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, METRICS_HISTORY_PATH);
+    metricsHistoryHandler.initializeMetrics(solrMetricsContext, METRICS_HISTORY_PATH);
   }
 
   public void securityNodeChanged() {
@@ -1217,15 +1224,15 @@ public class CoreContainer {
    *                     that calls solrCores.waitAddPendingCoreOps(...) and solrCores.removeFromPendingOps(...)
    *
    *                     <pre>
-   *                                           <code>
-   *                                           try {
-   *                                              solrCores.waitAddPendingCoreOps(dcore.getName());
-   *                                              createFromDescriptor(...);
-   *                                           } finally {
-   *                                              solrCores.removeFromPendingOps(dcore.getName());
-   *                                           }
-   *                                           </code>
-   *                                         </pre>
+   *                                                               <code>
+   *                                                               try {
+   *                                                                  solrCores.waitAddPendingCoreOps(dcore.getName());
+   *                                                                  createFromDescriptor(...);
+   *                                                               } finally {
+   *                                                                  solrCores.removeFromPendingOps(dcore.getName());
+   *                                                               }
+   *                                                               </code>
+   *                                                             </pre>
    *                     <p>
    *                     Trying to put the waitAddPending... in this method results in Bad Things Happening due to race conditions.
    *                     getCore() depends on getting the core returned _if_ it's in the pending list due to some other thread opening it.
@@ -1798,7 +1805,7 @@ public class CoreContainer {
       containerHandlers.put(path, (SolrRequestHandler) handler);
     }
     if (handler instanceof SolrMetricProducer) {
-      ((SolrMetricProducer) handler).initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, path);
+      ((SolrMetricProducer) handler).initializeMetrics(solrMetricsContext, path);
     }
     return handler;
   }
diff --git a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
index 464b030..942f429 100644
--- a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
@@ -53,8 +53,8 @@ import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.store.blockcache.BlockCache;
 import org.apache.solr.store.blockcache.BlockDirectory;
 import org.apache.solr.store.blockcache.BlockDirectoryCache;
@@ -141,6 +141,13 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
     }
     tmpFsCache.invalidateAll();
     tmpFsCache.cleanUp();
+    try {
+      SolrMetricProducer.super.close();
+      MetricsHolder.metrics.close();
+      LocalityHolder.reporter.close();
+    } catch (Exception e) {
+      throw new IOException(e);
+    }
   }
 
   private final static class LocalityHolder {
@@ -497,9 +504,9 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
-    MetricsHolder.metrics.initializeMetrics(manager, registry, tag, scope);
-    LocalityHolder.reporter.initializeMetrics(manager, registry, tag, scope);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    MetricsHolder.metrics.initializeMetrics(parentContext, scope);
+    LocalityHolder.reporter.initializeMetrics(parentContext, scope);
   }
 
   @Override
diff --git a/solr/core/src/java/org/apache/solr/core/PluginBag.java b/solr/core/src/java/org/apache/solr/core/PluginBag.java
index bfdfa20..f547d10 100644
--- a/solr/core/src/java/org/apache/solr/core/PluginBag.java
+++ b/solr/core/src/java/org/apache/solr/core/PluginBag.java
@@ -29,6 +29,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Supplier;
 import java.util.stream.Collectors;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
@@ -46,6 +47,7 @@ import org.apache.solr.handler.component.SearchComponent;
 import org.apache.solr.pkg.PackagePluginHolder;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.update.processor.UpdateRequestProcessorChain;
+import org.apache.solr.update.processor.UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder;
 import org.apache.solr.update.processor.UpdateRequestProcessorFactory;
 import org.apache.solr.util.CryptoKeys;
 import org.apache.solr.util.SimplePostTool;
@@ -140,7 +142,10 @@ public class PluginBag<T> implements AutoCloseable {
       return new LazyPluginHolder<T>(meta, info, core, core.getResourceLoader(), false);
     } else {
       if (info.pkgName != null) {
-        return new PackagePluginHolder<>(info, core, meta);
+        PackagePluginHolder<T> holder = new PackagePluginHolder<>(info, core, meta);
+        return meta.clazz == UpdateRequestProcessorFactory.class ?
+            new PluginHolder(info, new LazyUpdateProcessorFactoryHolder(holder)) :
+            holder;
       } else {
         T inst = core.createInstance(info.className, (Class<T>) meta.clazz, meta.getCleanTag(), null, core.getResourceLoader(info.pkgName));
         initInstance(inst, info);
@@ -149,8 +154,10 @@ public class PluginBag<T> implements AutoCloseable {
     }
   }
 
-  /** make a plugin available in an alternate name. This is an internal API and not for public use
-   * @param src key in which the plugin is already registered
+  /**
+   * make a plugin available in an alternate name. This is an internal API and not for public use
+   *
+   * @param src    key in which the plugin is already registered
    * @param target the new key in which the plugin should be aliased to. If target exists already, the alias fails
    * @return flag if the operation is successful or not
    */
@@ -200,7 +207,7 @@ public class PluginBag<T> implements AutoCloseable {
     return old == null ? null : old.get();
   }
 
-  PluginHolder<T> put(String name, PluginHolder<T> plugin) {
+  public PluginHolder<T> put(String name, PluginHolder<T> plugin) {
     Boolean registerApi = null;
     Boolean disableHandler = null;
     if (plugin.pluginInfo != null) {
@@ -236,11 +243,15 @@ public class PluginBag<T> implements AutoCloseable {
           apiBag.registerLazy((PluginHolder<SolrRequestHandler>) plugin, plugin.pluginInfo);
       }
     }
-    if(disableHandler == null) disableHandler = Boolean.FALSE;
+    if (disableHandler == null) disableHandler = Boolean.FALSE;
     PluginHolder<T> old = null;
-    if(!disableHandler) old = registry.put(name, plugin);
+    if (!disableHandler) old = registry.put(name, plugin);
     if (plugin.pluginInfo != null && plugin.pluginInfo.isDefault()) setDefault(name);
     if (plugin.isLoaded()) registerMBean(plugin.get(), core, name);
+    // old instance has been replaced - close it to prevent mem leaks
+    if (old != null && old != plugin) {
+      closeQuietly(old);
+    }
     return old;
   }
 
@@ -329,11 +340,19 @@ public class PluginBag<T> implements AutoCloseable {
     }
   }
 
+  public static void closeQuietly(Object inst)  {
+    try {
+      if (inst != null && inst instanceof AutoCloseable) ((AutoCloseable) inst).close();
+    } catch (Exception e) {
+      log.error("Error closing "+ inst , e);
+    }
+  }
+
   /**
    * An indirect reference to a plugin. It just wraps a plugin instance.
    * subclasses may choose to lazily load the plugin
    */
-  public static class PluginHolder<T> implements AutoCloseable {
+  public static class PluginHolder<T> implements Supplier<T>,  AutoCloseable {
     protected T inst;
     protected final PluginInfo pluginInfo;
     boolean registerAPI = false;
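
The put() change above closes the holder it displaces, and closeQuietly()
centralizes the best-effort close. A minimal sketch of that replace-and-close
pattern (the Registry and Holder names are hypothetical; Holder mirrors the
Supplier-plus-AutoCloseable shape PluginHolder now has):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    class Registry<T> {
      interface Holder<V> extends AutoCloseable {
        V get();
      }

      private final Map<String, Holder<T>> registry = new ConcurrentHashMap<>();

      Holder<T> put(String name, Holder<T> plugin) {
        Holder<T> old = registry.put(name, plugin);
        // the old holder has been replaced - close it to prevent leaks
        if (old != null && old != plugin) {
          closeQuietly(old);
        }
        return old;
      }

      static void closeQuietly(Object inst) {
        try {
          if (inst instanceof AutoCloseable) ((AutoCloseable) inst).close();
        } catch (Exception e) {
          // log and continue; a failed close should not fail the caller
        }
      }
    }
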
diff --git a/solr/core/src/java/org/apache/solr/core/PluginInfo.java b/solr/core/src/java/org/apache/solr/core/PluginInfo.java
index 366309e..bb290e1 100644
--- a/solr/core/src/java/org/apache/solr/core/PluginInfo.java
+++ b/solr/core/src/java/org/apache/solr/core/PluginInfo.java
@@ -60,6 +60,11 @@ public class PluginInfo implements MapSerializable {
     this.children = children == null ? Collections.<PluginInfo>emptyList(): unmodifiableList(children);
     isFromSolrConfig = false;
   }
+
+  /** Class names can be prefixed with a package name, e.g. my_package:my.pkg.Class.
+   * This checks whether the given name is a package-prefixed class name.
+   * The return value has first = package name and second = class name.
+   */
   static Pair<String,String > parseClassName(String name) {
     String pkgName = null;
     String className = name;
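
The hunk cuts off at the start of parseClassName(); from the new javadoc, a
plausible reconstruction of the rest of the method looks like this (the
actual body in the commit may differ):

    import org.apache.solr.common.util.Pair;

    static Pair<String, String> parseClassName(String name) {
      String pkgName = null;
      String className = name;
      int idx = name.indexOf(':');            // e.g. "my_package:my.pkg.Class"
      if (idx > 0) {
        pkgName = name.substring(0, idx);     // first = package name
        className = name.substring(idx + 1);  // second = class name
      }
      return new Pair<>(pkgName, className);
    }
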
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 8315047..09067b1 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -108,8 +108,8 @@ import org.apache.solr.handler.component.HighlightComponent;
 import org.apache.solr.handler.component.SearchComponent;
 import org.apache.solr.logging.MDCLoggingContext;
 import org.apache.solr.metrics.SolrCoreMetricManager;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.pkg.PackageListeners;
 import org.apache.solr.pkg.PackageLoader;
 import org.apache.solr.request.SolrQueryRequest;
@@ -233,7 +233,8 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
   private final CoreContainer coreContainer;
 
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
-  private String metricTag = Integer.toHexString(hashCode());
+  private final String metricTag = SolrMetricProducer.getUniqueMetricTag(this, null);
+  private final SolrMetricsContext solrMetricsContext;
 
   public volatile boolean searchEnabled = true;
   public volatile boolean indexEnabled = true;
@@ -282,6 +283,9 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
     return resourceLoader;
   }
 
+  /** Gets the SolrResourceLoader for a given package
+   * @param pkg The package name
+   */
   public SolrResourceLoader getResourceLoader(String pkg) {
     if (pkg == null) {
       return resourceLoader;
@@ -936,6 +940,7 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
       this.configSetProperties = configSetProperties;
       // Initialize the metrics manager
       this.coreMetricManager = initCoreMetricManager(config);
+      solrMetricsContext = coreMetricManager.getSolrMetricsContext();
       this.coreMetricManager.loadReporters();
 
       if (updateHandler == null) {
@@ -957,15 +962,13 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
 
       checkVersionFieldExistsInSchema(schema, coreDescriptor);
 
-      SolrMetricManager metricManager = coreContainer.getMetricManager();
-
       // initialize searcher-related metrics
-      initializeMetrics(metricManager, coreMetricManager.getRegistryName(), metricTag, null);
+      initializeMetrics(solrMetricsContext, null);
 
       SolrFieldCacheBean solrFieldCacheBean = new SolrFieldCacheBean();
       // this is registered at the CONTAINER level because it's not core-specific - for now we
       // also register it here for back-compat
-      solrFieldCacheBean.initializeMetrics(metricManager, coreMetricManager.getRegistryName(), metricTag, "core");
+      solrFieldCacheBean.initializeMetrics(solrMetricsContext, "core");
       infoRegistry.put("fieldCache", solrFieldCacheBean);
 
       initSchema(config, schema);
@@ -1032,8 +1035,7 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
 
       // Allow the directory factory to report metrics
       if (directoryFactory instanceof SolrMetricProducer) {
-        ((SolrMetricProducer) directoryFactory).initializeMetrics(metricManager, coreMetricManager.getRegistryName(),
-            metricTag, "directoryFactory");
+        ((SolrMetricProducer) directoryFactory).initializeMetrics(solrMetricsContext, "directoryFactory");
       }
 
       // seed version buckets with max from index during core initialization ... requires a searcher!
@@ -1180,61 +1182,66 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
-    newSearcherCounter = manager.counter(this, registry, "new", Category.SEARCHER.toString());
-    newSearcherTimer = manager.timer(this, registry, "time", Category.SEARCHER.toString(), "new");
-    newSearcherWarmupTimer = manager.timer(this, registry, "warmup", Category.SEARCHER.toString(), "new");
-    newSearcherMaxReachedCounter = manager.counter(this, registry, "maxReached", Category.SEARCHER.toString(), "new");
-    newSearcherOtherErrorsCounter = manager.counter(this, registry, "errors", Category.SEARCHER.toString(), "new");
-
-    manager.registerGauge(this, registry, () -> name == null ? "(null)" : name, getMetricTag(), true, "coreName", Category.CORE.toString());
-    manager.registerGauge(this, registry, () -> startTime, getMetricTag(), true, "startTime", Category.CORE.toString());
-    manager.registerGauge(this, registry, () -> getOpenCount(), getMetricTag(), true, "refCount", Category.CORE.toString());
-    manager.registerGauge(this, registry, () -> resourceLoader.getInstancePath().toString(), getMetricTag(), true, "instanceDir", Category.CORE.toString());
-    manager.registerGauge(this, registry, () -> isClosed() ? "(closed)" : getIndexDir(), getMetricTag(), true, "indexDir", Category.CORE.toString());
-    manager.registerGauge(this, registry, () -> isClosed() ? 0 : getIndexSize(), getMetricTag(), true, "sizeInBytes", Category.INDEX.toString());
-    manager.registerGauge(this, registry, () -> isClosed() ? "(closed)" : NumberUtils.readableSize(getIndexSize()), getMetricTag(), true, "size", Category.INDEX.toString());
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    newSearcherCounter = parentContext.counter(this, "new", Category.SEARCHER.toString());
+    newSearcherTimer = parentContext.timer(this, "time", Category.SEARCHER.toString(), "new");
+    newSearcherWarmupTimer = parentContext.timer(this, "warmup", Category.SEARCHER.toString(), "new");
+    newSearcherMaxReachedCounter = parentContext.counter(this, "maxReached", Category.SEARCHER.toString(), "new");
+    newSearcherOtherErrorsCounter = parentContext.counter(this, "errors", Category.SEARCHER.toString(), "new");
+
+    parentContext.gauge(this, () -> name == null ? "(null)" : name, true, "coreName", Category.CORE.toString());
+    parentContext.gauge(this, () -> startTime, true, "startTime", Category.CORE.toString());
+    parentContext.gauge(this, () -> getOpenCount(), true, "refCount", Category.CORE.toString());
+    parentContext.gauge(this, () -> resourceLoader.getInstancePath().toString(), true, "instanceDir", Category.CORE.toString());
+    parentContext.gauge(this, () -> isClosed() ? "(closed)" : getIndexDir(), true, "indexDir", Category.CORE.toString());
+    parentContext.gauge(this, () -> isClosed() ? 0 : getIndexSize(), true, "sizeInBytes", Category.INDEX.toString());
+    parentContext.gauge(this, () -> isClosed() ? "(closed)" : NumberUtils.readableSize(getIndexSize()), true, "size", Category.INDEX.toString());
     if (coreContainer != null) {
-      manager.registerGauge(this, registry, () -> coreContainer.getNamesForCore(this), getMetricTag(), true, "aliases", Category.CORE.toString());
+      parentContext.gauge(this, () -> coreContainer.getNamesForCore(this), true, "aliases", Category.CORE.toString());
       final CloudDescriptor cd = getCoreDescriptor().getCloudDescriptor();
       if (cd != null) {
-        manager.registerGauge(this, registry, () -> {
+        parentContext.gauge(this, () -> {
           if (cd.getCollectionName() != null) {
             return cd.getCollectionName();
           } else {
             return "_notset_";
           }
-        }, getMetricTag(), true, "collection", Category.CORE.toString());
+        }, true, "collection", Category.CORE.toString());
 
-        manager.registerGauge(this, registry, () -> {
+        parentContext.gauge(this, () -> {
           if (cd.getShardId() != null) {
             return cd.getShardId();
           } else {
             return "_auto_";
           }
-        }, getMetricTag(), true, "shard", Category.CORE.toString());
+        }, true, "shard", Category.CORE.toString());
       }
     }
     // initialize disk total / free metrics
     Path dataDirPath = Paths.get(dataDir);
     File dataDirFile = dataDirPath.toFile();
-    manager.registerGauge(this, registry, () -> dataDirFile.getTotalSpace(), getMetricTag(), true, "totalSpace", Category.CORE.toString(), "fs");
-    manager.registerGauge(this, registry, () -> dataDirFile.getUsableSpace(), getMetricTag(), true, "usableSpace", Category.CORE.toString(), "fs");
-    manager.registerGauge(this, registry, () -> dataDirPath.toAbsolutePath().toString(), getMetricTag(), true, "path", Category.CORE.toString(), "fs");
-    manager.registerGauge(this, registry, () -> {
+    parentContext.gauge(this, () -> dataDirFile.getTotalSpace(), true, "totalSpace", Category.CORE.toString(), "fs");
+    parentContext.gauge(this, () -> dataDirFile.getUsableSpace(), true, "usableSpace", Category.CORE.toString(), "fs");
+    parentContext.gauge(this, () -> dataDirPath.toAbsolutePath().toString(), true, "path", Category.CORE.toString(), "fs");
+    parentContext.gauge(this, () -> {
       try {
         return org.apache.lucene.util.IOUtils.spins(dataDirPath.toAbsolutePath());
       } catch (IOException e) {
         // default to spinning
         return true;
       }
-    }, getMetricTag(), true, "spins", Category.CORE.toString(), "fs");
+    }, true, "spins", Category.CORE.toString(), "fs");
   }
 
   public String getMetricTag() {
     return metricTag;
   }
 
+  @Override
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
+  }
+
   private void checkVersionFieldExistsInSchema(IndexSchema schema, CoreDescriptor coreDescriptor) {
     if (null != coreDescriptor.getCloudDescriptor()) {
       // we are evidently running in cloud mode.  
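
Across the SolrCore hunks above, the parameter mapping from the old metrics
API to the new one is mechanical: the registry and the per-instance tag are
dropped from each call site because the SolrMetricsContext already carries
them. Taken from the refCount gauge in the hunk:

    // old: registry and tag passed explicitly
    manager.registerGauge(this, registry, () -> getOpenCount(),
        getMetricTag(), true, "refCount", Category.CORE.toString());

    // new: both travel inside the context
    parentContext.gauge(this, () -> getOpenCount(),
        true, "refCount", Category.CORE.toString());
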
diff --git a/solr/core/src/java/org/apache/solr/core/SolrInfoBean.java b/solr/core/src/java/org/apache/solr/core/SolrInfoBean.java
index bfb3428..d7ddd86 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrInfoBean.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrInfoBean.java
@@ -20,7 +20,8 @@ import java.util.Map;
 import java.util.Set;
 
 import com.codahale.metrics.MetricRegistry;
-import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.util.stats.MetricUtils;
 
 /**
@@ -77,13 +78,17 @@ public interface SolrInfoBean {
    * (default is null, which means no registry).
    */
   default MetricRegistry getMetricRegistry() {
+    if (this instanceof SolrMetricProducer) {
+      SolrMetricsContext context = ((SolrMetricProducer)this).getSolrMetricsContext();
+      return context != null ? context.getMetricRegistry() : null;
+    }
     return null;
   }
 
   /** Register a metric name that this component reports. This method is called by various
    * metric registration methods in {@link org.apache.solr.metrics.SolrMetricManager} in order
    * to capture what metric names are reported from this component (which in turn is called
-   * from {@link org.apache.solr.metrics.SolrMetricProducer#initializeMetrics(SolrMetricManager, String, String, String)}).
+   * from {@link org.apache.solr.metrics.SolrMetricProducer#initializeMetrics(SolrMetricsContext, String)}).
    * <p>Default implementation registers all metrics added by a component. Implementations may
    * override this to avoid reporting some or all metrics returned by {@link #getMetricsSnapshot()}</p>
    */
diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
index cc1ef7a..a57660e 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
@@ -86,7 +86,6 @@ import org.slf4j.LoggerFactory;
 public class SolrResourceLoader implements ResourceLoader, Closeable {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  private String name = "";
   static final String project = "solr";
   static final String base = "org.apache" + "." + project;
   static final String[] packages = {
@@ -97,7 +96,10 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
   };
   private static final java.lang.String SOLR_CORE_NAME = "solr.core.name";
   private static Set<String> loggedOnce = new ConcurrentSkipListSet<>();
+  private static final Charset UTF_8 = StandardCharsets.UTF_8;
+
 
+  private String name = "";
   protected URLClassLoader classLoader;
   private final Path instanceDir;
   private String dataDir;
@@ -105,7 +107,6 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
   private final List<SolrCoreAware> waitingForCore = Collections.synchronizedList(new ArrayList<SolrCoreAware>());
   private final List<SolrInfoBean> infoMBeans = Collections.synchronizedList(new ArrayList<SolrInfoBean>());
   private final List<ResourceLoaderAware> waitingForResources = Collections.synchronizedList(new ArrayList<ResourceLoaderAware>());
-  private static final Charset UTF_8 = StandardCharsets.UTF_8;
 
   private final Properties coreProperties;
 
diff --git a/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java b/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java
index ded379f..cd653e7 100644
--- a/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java
+++ b/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java
@@ -115,7 +115,7 @@ public class DistribPackageStore implements PackageStore {
         if (!parent.exists()) {
           parent.mkdirs();
         }
-        Map m = (Map) Utils.fromJSON(meta.array());
+        Map m = (Map) Utils.fromJSON(meta.array(), meta.arrayOffset(), meta.limit());
         if (m == null || m.isEmpty()) {
           throw new SolrException(SERVER_ERROR, "invalid metadata , discarding : " + path);
         }
@@ -187,7 +187,7 @@ public class DistribPackageStore implements PackageStore {
         metadata = Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(),
             baseUrl + "/node/files" + getMetaPath(),
             Utils.newBytesConsumer((int) MAX_PKG_SIZE));
-        m = (Map) Utils.fromJSON(metadata.array());
+        m = (Map) Utils.fromJSON(metadata.array(), metadata.arrayOffset(), metadata.limit());
       } catch (SolrException e) {
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error fetching metadata", e);
       }
@@ -354,26 +354,21 @@ public class DistribPackageStore implements PackageStore {
           Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(), url, null);
         } catch (Exception e) {
           log.info("Node: " + node +
-              " failed to respond for blob notification", e);
+              " failed to respond for file fetch notification", e);
           //ignore the exception
           // some nodes may be down or not responding
         }
         i++;
       }
     } finally {
-      new Thread(() -> {
+      coreContainer.getUpdateShardHandler().getUpdateExecutor().submit(() -> {
         try {
-          // keep the jar in memory for 10 secs , so that
-          //every node can download it from memory without the file system
           Thread.sleep(10 * 1000);
-        } catch (Exception e) {
-          //don't care
         } finally {
           tmpFiles.remove(entry.getPath());
         }
-      }).start();
-
-
+        return null;
+      });
     }
 
   }
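
The finally block above replaces an ad-hoc Thread with a task on the update
executor: the uploaded file stays in an in-memory map for roughly ten seconds
so other nodes can fetch it without touching the file system. A self-contained
sketch of that retention pattern (the names are hypothetical):

    import java.nio.ByteBuffer;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    class TmpFileRetention {
      private final Map<String, ByteBuffer> tmpFiles = new ConcurrentHashMap<>();
      private final ExecutorService executor = Executors.newSingleThreadExecutor();

      void retainBriefly(String path, ByteBuffer data) {
        tmpFiles.put(path, data);
        executor.submit(() -> {
          try {
            Thread.sleep(10 * 1000);   // retention window
          } finally {
            tmpFiles.remove(path);     // always evict, even if interrupted
          }
          return null;                 // Callable<Void>, so sleep() may throw
        });
      }
    }
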
diff --git a/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java b/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java
index a51b366..7e80b9a 100644
--- a/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java
+++ b/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java
@@ -198,7 +198,7 @@ public class PackageStoreAPI {
         cryptoKeys = new CryptoKeys(keys);
       } catch (Exception e) {
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-            "Error parsing public keyts in ZooKeeper");
+            "Error parsing public keys in ZooKeeper");
       }
       for (String sig : sigs) {
         if (cryptoKeys.verify(sig, buf) == null) {
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index dc1d1b1..6f905c7 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -92,7 +92,7 @@ import org.apache.solr.core.backup.repository.BackupRepository;
 import org.apache.solr.core.backup.repository.LocalFileSystemRepository;
 import org.apache.solr.handler.IndexFetcher.IndexFetchResult;
 import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.search.SolrIndexSearcher;
@@ -863,21 +863,20 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
-    super.initializeMetrics(manager, registry, tag, scope);
-
-    manager.registerGauge(this, registry, () -> (core != null && !core.isClosed() ? NumberUtils.readableSize(core.getIndexSize()) : ""),
-        tag, true, "indexSize", getCategory().toString(), scope);
-    manager.registerGauge(this, registry, () -> (core != null && !core.isClosed() ? getIndexVersion().toString() : ""),
-        tag, true, "indexVersion", getCategory().toString(), scope);
-    manager.registerGauge(this, registry, () -> (core != null && !core.isClosed() ? getIndexVersion().generation : 0),
-        tag, true, GENERATION, getCategory().toString(), scope);
-    manager.registerGauge(this, registry, () -> (core != null && !core.isClosed() ? core.getIndexDir() : ""),
-        tag, true, "indexPath", getCategory().toString(), scope);
-    manager.registerGauge(this, registry, () -> isMaster,
-        tag, true, "isMaster", getCategory().toString(), scope);
-    manager.registerGauge(this, registry, () -> isSlave,
-        tag, true, "isSlave", getCategory().toString(), scope);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    super.initializeMetrics(parentContext, scope);
+    solrMetricsContext.gauge(this, () -> (core != null && !core.isClosed() ? NumberUtils.readableSize(core.getIndexSize()) : ""),
+        true, "indexSize", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> (core != null && !core.isClosed() ? getIndexVersion().toString() : ""),
+        true, "indexVersion", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> (core != null && !core.isClosed() ? getIndexVersion().generation : 0),
+        true, GENERATION, getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> (core != null && !core.isClosed() ? core.getIndexDir() : ""),
+        true, "indexPath", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> isMaster,
+        true, "isMaster", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> isSlave,
+        true, "isSlave", getCategory().toString(), scope);
     final MetricsMap fetcherMap = new MetricsMap((detailed, map) -> {
       IndexFetcher fetcher = currentIndexFetcher;
       if (fetcher != null) {
@@ -906,13 +905,13 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
         addVal(map, IndexFetcher.CONF_FILES_REPLICATED, props, String.class);
       }
     });
-    manager.registerGauge(this, registry, fetcherMap, tag, true, "fetcher", getCategory().toString(), scope);
-    manager.registerGauge(this, registry, () -> isMaster && includeConfFiles != null ? includeConfFiles : "",
-        tag, true, "confFilesToReplicate", getCategory().toString(), scope);
-    manager.registerGauge(this, registry, () -> isMaster ? getReplicateAfterStrings() : Collections.<String>emptyList(),
-        tag, true, REPLICATE_AFTER, getCategory().toString(), scope);
-    manager.registerGauge(this, registry, () -> isMaster && replicationEnabled.get(),
-        tag, true, "replicationEnabled", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, fetcherMap, true, "fetcher", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> isMaster && includeConfFiles != null ? includeConfFiles : "",
+        true, "confFilesToReplicate", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> isMaster ? getReplicateAfterStrings() : Collections.<String>emptyList(),
+        true, REPLICATE_AFTER, getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> isMaster && replicationEnabled.get(),
+        true, "replicationEnabled", getCategory().toString(), scope);
   }
 
   //TODO Should a failure retrieving any piece of info mark the overall request as a failure?  Is there a core set of values that are required to make a response here useful?
@@ -1387,7 +1386,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
     });
   }
 
-  public void close() {
+  public void shutdown() {
     if (executorService != null) executorService.shutdown();
     if (pollingIndexFetcher != null) {
       pollingIndexFetcher.destroy();
diff --git a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
index eca391b..4d9e96b 100644
--- a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
+++ b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
@@ -22,11 +22,13 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 
-import com.codahale.metrics.MetricRegistry;
-import com.google.common.collect.ImmutableList;
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Meter;
 import com.codahale.metrics.Timer;
+import com.google.common.collect.ImmutableList;
+import org.apache.solr.api.Api;
+import org.apache.solr.api.ApiBag;
+import org.apache.solr.api.ApiSupport;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.params.SolrParams;
@@ -36,16 +38,13 @@ import org.apache.solr.core.PluginBag;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.search.SyntaxError;
 import org.apache.solr.util.SolrPluginUtils;
-import org.apache.solr.api.Api;
-import org.apache.solr.api.ApiBag;
-import org.apache.solr.api.ApiSupport;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -79,9 +78,7 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
   private PluginInfo pluginInfo;
 
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
-  private MetricRegistry registry;
-  protected String registryName;
-  protected SolrMetricManager metricManager;
+  protected SolrMetricsContext solrMetricsContext;
 
 
   @SuppressForbidden(reason = "Need currentTimeMillis, used only for stats output")
@@ -144,21 +141,24 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, final String scope) {
-    this.metricManager = manager;
-    this.registryName = registryName;
-    this.registry = manager.registry(registryName);
-    numErrors = manager.meter(this, registryName, "errors", getCategory().toString(), scope);
-    numServerErrors = manager.meter(this, registryName, "serverErrors", getCategory().toString(), scope);
-    numClientErrors = manager.meter(this, registryName, "clientErrors", getCategory().toString(), scope);
-    numTimeouts = manager.meter(this, registryName, "timeouts", getCategory().toString(), scope);
-    requests = manager.counter(this, registryName, "requests", getCategory().toString(), scope);
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
+  }
+
+  @Override
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    this.solrMetricsContext = parentContext.getChildContext(this);
+    numErrors = solrMetricsContext.meter(this, "errors", getCategory().toString(), scope);
+    numServerErrors = solrMetricsContext.meter(this, "serverErrors", getCategory().toString(), scope);
+    numClientErrors = solrMetricsContext.meter(this, "clientErrors", getCategory().toString(), scope);
+    numTimeouts = solrMetricsContext.meter(this, "timeouts", getCategory().toString(), scope);
+    requests = solrMetricsContext.counter(this, "requests", getCategory().toString(), scope);
     MetricsMap metricsMap = new MetricsMap((detail, map) ->
         shardPurposes.forEach((k, v) -> map.put(k, v.getCount())));
-    manager.registerGauge(this, registryName, metricsMap, tag, true, "shardRequests", getCategory().toString(), scope);
-    requestTimes = manager.timer(this, registryName, "requestTimes", getCategory().toString(), scope);
-    totalTime = manager.counter(this, registryName, "totalTime", getCategory().toString(), scope);
-    manager.registerGauge(this, registryName, () -> handlerStart, tag, true, "handlerStart", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, metricsMap, true, "shardRequests", getCategory().toString(), scope);
+    requestTimes = solrMetricsContext.timer(this, "requestTimes", getCategory().toString(), scope);
+    totalTime = solrMetricsContext.counter(this, "totalTime", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> handlerStart, true, "handlerStart", getCategory().toString(), scope);
   }
 
   public static SolrParams getSolrParamsFromNamedList(NamedList args, String key) {
@@ -273,11 +273,6 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
   }
 
   @Override
-  public MetricRegistry getMetricRegistry() {
-    return registry;
-  }
-
-  @Override
   public SolrRequestHandler getSubHandler(String subPath) {
     return null;
   }
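
With the base class holding the child context in the protected
solrMetricsContext field, subclasses extend the standard handler metrics by
calling super first and then registering their own, as ReplicationHandler does
above. A minimal sketch of such a subclass (the handler itself is
hypothetical):

    import com.codahale.metrics.Counter;
    import org.apache.solr.handler.RequestHandlerBase;
    import org.apache.solr.metrics.SolrMetricsContext;
    import org.apache.solr.request.SolrQueryRequest;
    import org.apache.solr.response.SolrQueryResponse;

    public class PingCountHandler extends RequestHandlerBase {
      private Counter pings;

      @Override
      public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
        super.initializeMetrics(parentContext, scope);  // errors, timeouts, ...
        pings = solrMetricsContext.counter(this, "pings", getCategory().toString(), scope);
      }

      @Override
      public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) {
        pings.inc();
        rsp.add("pong", true);
      }

      @Override
      public String getDescription() {
        return "counts pings";
      }
    }
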
diff --git a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
index 03b9600..004da31 100644
--- a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
@@ -153,7 +153,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
     NamedList configSetProperties = core.getConfigSetProperties();
     if (configSetProperties == null) return false;
     Object immutable = configSetProperties.get(IMMUTABLE_CONFIGSET_ARG);
-    return immutable != null ? Boolean.parseBoolean(immutable.toString()) : false;
+    return immutable != null && Boolean.parseBoolean(immutable.toString());
   }
 
 
@@ -249,10 +249,13 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
                 Object o = map.get(componentName);
                 val.put(parts.get(1), makeMap(componentName, o));
                 if (req.getParams().getBool("meta", false)) {
+                  // meta=true is asking for the package info of the plugin
+                  // We go through all the listeners and see if there is one registered for this plugin
                   List<PackageListeners.Listener> listeners = req.getCore().getPackageListeners().getListeners();
                   for (PackageListeners.Listener listener :
                       listeners) {
                     PluginInfo info = listener.pluginInfo();
+                    if(info == null) continue;
                     if (info.type.equals(parts.get(1)) && info.name.equals(componentName)) {
                       if (o instanceof Map) {
                         Map m1 = (Map) o;
@@ -306,13 +309,13 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
         pluginInfo = ((PluginInfo) plugin).toMap(new LinkedHashMap<>());
       }
       String useParams = (String) pluginInfo.get(USEPARAM);
-      String useparamsInReq = req.getOriginalParams().get(USEPARAM);
-      if (useParams != null || useparamsInReq != null) {
+      String useParamsInReq = req.getOriginalParams().get(USEPARAM);
+      if (useParams != null || useParamsInReq != null) {
         Map m = new LinkedHashMap<>();
         pluginInfo.put("_useParamsExpanded_", m);
         List<String> params = new ArrayList<>();
         if (useParams != null) params.addAll(StrUtils.splitSmart(useParams, ','));
-        if (useparamsInReq != null) params.addAll(StrUtils.splitSmart(useparamsInReq, ','));
+        if (useParamsInReq != null) params.addAll(StrUtils.splitSmart(useParamsInReq, ','));
         for (String param : params) {
           RequestParams.ParamSet p = this.req.getCore().getSolrConfig().getRequestParams().getParams(param);
           if (p != null) {
@@ -428,10 +431,14 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
             if (op.hasError()) break;
             for (String s : name) {
               if (params.getParams(s) == null) {
-                op.addError(formatString("can't delete . No such params ''{0}'' exist", s));
+                op.addError(formatString("Could not delete. No such params ''{0}'' exist", s));
               }
               params = params.setParams(s, null);
             }
+            break;
+          }
+          default: {
+            op.unknownOperation();
           }
         }
       }
@@ -518,7 +525,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
       } else {
         SolrResourceLoader.persistConfLocally(loader, ConfigOverlay.RESOURCE_NAME, overlay.toByteArray());
         req.getCore().getCoreContainer().reload(req.getCore().getName());
-        log.debug("Executed config commands successfully and persited to File System {}", ops);
+        log.info("Executed config commands successfully and persisted to File System {}", ops);
       }
 
     }
@@ -785,7 +792,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
 
         if (!success) {
           String coreUrl = concurrentTasks.get(f).coreUrl;
-          log.warn("Core " + coreUrl + "could not get the expected version " + expectedVersion);
+          log.warn("Core " + coreUrl + " could not get the expected version " + expectedVersion);
           if (failedList == null) failedList = new ArrayList<>();
           failedList.add(coreUrl);
         }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
index cd6cd38..589a1ca 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
@@ -46,6 +46,7 @@ import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.logging.MDCLoggingContext;
 import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.security.AuthorizationContext;
@@ -120,10 +121,10 @@ public class CoreAdminHandler extends RequestHandlerBase implements PermissionNa
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    super.initializeMetrics(manager, registryName, tag, scope);
-    parallelExecutor = MetricUtils.instrumentedExecutorService(parallelExecutor, this, manager.registry(registryName),
-        SolrMetricManager.mkName("parallelCoreAdminExecutor", getCategory().name(),scope, "threadPool"));
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    super.initializeMetrics(parentContext, scope);
+    parallelExecutor = MetricUtils.instrumentedExecutorService(parallelExecutor, this, solrMetricsContext.getMetricRegistry(),
+        SolrMetricManager.mkName("parallelCoreAdminExecutor", getCategory().name(), scope, "threadPool"));
   }
   @Override
   public Boolean registerV2() {
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
index 81e6c92..5739651 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
@@ -55,6 +55,7 @@ import static org.apache.solr.common.params.CommonParams.NAME;
 import static org.apache.solr.common.params.CoreAdminParams.COLLECTION;
 import static org.apache.solr.common.params.CoreAdminParams.CoreAdminAction.*;
 import static org.apache.solr.common.params.CoreAdminParams.REPLICA;
+import static org.apache.solr.common.params.CoreAdminParams.REPLICA_TYPE;
 import static org.apache.solr.common.params.CoreAdminParams.SHARD;
 import static org.apache.solr.handler.admin.CoreAdminHandler.COMPLETED;
 import static org.apache.solr.handler.admin.CoreAdminHandler.CallInfo;
@@ -333,6 +334,7 @@ enum CoreAdminOperation implements CoreAdminOp {
               cloudInfo.add(COLLECTION, core.getCoreDescriptor().getCloudDescriptor().getCollectionName());
               cloudInfo.add(SHARD, core.getCoreDescriptor().getCloudDescriptor().getShardId());
               cloudInfo.add(REPLICA, core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName());
+              cloudInfo.add(REPLICA_TYPE, core.getCoreDescriptor().getCloudDescriptor().getReplicaType().name());
               info.add("cloud", cloudInfo);
             }
             if (isIndexInfoNeeded) {
diff --git a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
index 85f0ae5..5bf8a3a 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
@@ -78,6 +78,7 @@ import org.apache.solr.search.DocIterator;
 import org.apache.solr.search.DocList;
 import org.apache.solr.search.DocSlice;
 import org.apache.solr.search.QParser;
+import org.apache.solr.search.ReturnFields;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.search.SortSpecParsing;
 import org.apache.solr.search.SyntaxError;
@@ -406,15 +407,15 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
       collector = groupExpandCollector;
     }
 
-    if (pfilter.filter == null) {
-      searcher.search(query, collector);
-    } else {
-      Query q = new BooleanQuery.Builder()
+    if (pfilter.filter != null) {
+      query = new BooleanQuery.Builder()
           .add(query, Occur.MUST)
           .add(pfilter.filter, Occur.FILTER)
           .build();
-      searcher.search(q, collector);
     }
+    searcher.search(query, collector);
+
+    ReturnFields returnFields = rb.rsp.getReturnFields();
     LongObjectMap<Collector> groups = ((GroupCollector) groupExpandCollector).getGroups();
     NamedList outMap = new SimpleOrderedMap();
     CharsRefBuilder charsRef = new CharsRefBuilder();
@@ -424,6 +425,9 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
       TopDocs topDocs = topDocsCollector.topDocs();
       ScoreDoc[] scoreDocs = topDocs.scoreDocs;
       if (scoreDocs.length > 0) {
+        if (returnFields.wantsScore() && sort != null) {
+          TopFieldCollector.populateScores(scoreDocs, searcher, query);
+        }
         int[] docs = new int[scoreDocs.length];
         float[] scores = new float[scoreDocs.length];
         for (int i = 0; i < docs.length; i++) {
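
Two things happen in the ExpandComponent hunks above: the post filter is
folded into the main query as a non-scoring FILTER clause, so a single search
call covers both paths, and when the expanded groups are sorted by a field the
scores are filled in afterwards via TopFieldCollector.populateScores(), since
field-sorted collection does not compute them. A sketch of the query/filter
combination:

    import org.apache.lucene.search.BooleanClause.Occur;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.Query;

    static Query withFilter(Query query, Query filter) {
      if (filter == null) {
        return query;                // nothing to fold in
      }
      return new BooleanQuery.Builder()
          .add(query, Occur.MUST)    // scored clause
          .add(filter, Occur.FILTER) // must match, but never affects the score
          .build();
    }
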
diff --git a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
index 844acf3..05bcee5 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
@@ -73,6 +73,7 @@ import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.security.HttpClientBuilderPlugin;
 import org.apache.solr.update.UpdateShardHandlerConfig;
@@ -115,6 +116,7 @@ public class HttpShardHandlerFactory extends ShardHandlerFactory implements org.
   float permittedLoadBalancerRequestsMaximumFraction = 1.0f;
   boolean accessPolicy = false;
   private WhitelistHostChecker whitelistHostChecker = null;
+  private SolrMetricsContext solrMetricsContext;
 
   private String scheme = null;
 
@@ -358,6 +360,16 @@ public class HttpShardHandlerFactory extends ShardHandlerFactory implements org.
         }
       }
     }
+    try {
+      SolrMetricProducer.super.close();
+    } catch (Exception e) {
+      log.warn("Exception closing.", e);
+    }
+  }
+
+  @Override
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
   }
 
   /**
@@ -696,11 +708,12 @@ public class HttpShardHandlerFactory extends ShardHandlerFactory implements org.
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    solrMetricsContext = parentContext.getChildContext(this);
     String expandedScope = SolrMetricManager.mkName(scope, SolrInfoBean.Category.QUERY.name());
-    httpListenerFactory.initializeMetrics(manager, registry, tag, expandedScope);
+    httpListenerFactory.initializeMetrics(solrMetricsContext, expandedScope);
     commExecutor = MetricUtils.instrumentedExecutorService(commExecutor, null,
-        manager.registry(registry),
+        solrMetricsContext.getMetricRegistry(),
         SolrMetricManager.mkName("httpShardExecutor", expandedScope, "threadPool"));
   }
   
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
index 64b8c9a..f617fcb 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
@@ -40,6 +40,8 @@ import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.handler.RequestHandlerBase;
+import org.apache.solr.pkg.PackageListeners;
+import org.apache.solr.pkg.PackageLoader;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.search.SolrQueryTimeoutImpl;
@@ -58,11 +60,9 @@ import static org.apache.solr.common.params.CommonParams.PATH;
 
 
 /**
- *
  * Refer SOLR-281
- *
  */
-public class SearchHandler extends RequestHandlerBase implements SolrCoreAware , PluginInfoInitialized, PermissionNameProvider {
+public class SearchHandler extends RequestHandlerBase implements SolrCoreAware, PluginInfoInitialized, PermissionNameProvider {
   static final String INIT_COMPONENTS = "components";
   static final String INIT_FIRST_COMPONENTS = "first-components";
   static final String INIT_LAST_COMPONENTS = "last-components";
@@ -70,22 +70,21 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   protected volatile List<SearchComponent> components;
-  private ShardHandlerFactory shardHandlerFactory ;
+  private ShardHandlerFactory shardHandlerFactory;
   private PluginInfo shfInfo;
   private SolrCore core;
 
-  protected List<String> getDefaultComponents()
-  {
+  protected List<String> getDefaultComponents() {
     ArrayList<String> names = new ArrayList<>(8);
-    names.add( QueryComponent.COMPONENT_NAME );
-    names.add( FacetComponent.COMPONENT_NAME );
-    names.add( FacetModule.COMPONENT_NAME );
-    names.add( MoreLikeThisComponent.COMPONENT_NAME );
-    names.add( HighlightComponent.COMPONENT_NAME );
-    names.add( StatsComponent.COMPONENT_NAME );
-    names.add( DebugComponent.COMPONENT_NAME );
-    names.add( ExpandComponent.COMPONENT_NAME);
-    names.add( TermsComponent.COMPONENT_NAME);
+    names.add(QueryComponent.COMPONENT_NAME);
+    names.add(FacetComponent.COMPONENT_NAME);
+    names.add(FacetModule.COMPONENT_NAME);
+    names.add(MoreLikeThisComponent.COMPONENT_NAME);
+    names.add(HighlightComponent.COMPONENT_NAME);
+    names.add(StatsComponent.COMPONENT_NAME);
+    names.add(DebugComponent.COMPONENT_NAME);
+    names.add(ExpandComponent.COMPONENT_NAME);
+    names.add(TermsComponent.COMPONENT_NAME);
 
     return names;
   }
@@ -94,7 +93,7 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
   public void init(PluginInfo info) {
     init(info.initArgs);
     for (PluginInfo child : info.children) {
-      if("shardHandlerFactory".equals(child.type)){
+      if ("shardHandlerFactory".equals(child.type)) {
         this.shfInfo = child;
         break;
       }
@@ -113,8 +112,7 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
    */
   @Override
   @SuppressWarnings("unchecked")
-  public void inform(SolrCore core)
-  {
+  public void inform(SolrCore core) {
     this.core = core;
     List<String> c = (List<String>) initArgs.get(INIT_COMPONENTS);
     Set<String> missing = new HashSet<>(core.getSearchComponents().checkContains(c));
@@ -143,6 +141,32 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
       });
     }
 
+    if (core.getCoreContainer().isZooKeeperAware()) {
+      core.getPackageListeners().addListener(new PackageListeners.Listener() {
+        @Override
+        public String packageName() {
+          return null;
+        }
+
+        @Override
+        public PluginInfo pluginInfo() {
+          return null;
+        }
+
+        @Override
+        public void changed(PackageLoader.Package pkg) {
+          //we could optimize this by listening to only relevant packages,
+          // but it is not worth optimizing as these are lightweight objects
+          components = null;
+        }
+
+        @Override
+        public PackageLoader.Package.Version getPackageVersion() {
+          return null;
+        }
+      });
+    }
+
   }
 
   private void initComponents() {
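
The listener registered above implements a cheap invalidation scheme: when any
package changes, the cached component list is nulled and the next request
rebuilds it against the freshly loaded classes. A generic sketch of that
pattern (the names are hypothetical):

    import java.util.List;
    import java.util.function.Supplier;

    class LazyComponents<T> {
      private final Supplier<List<T>> builder;
      private volatile List<T> components;

      LazyComponents(Supplier<List<T>> builder) {
        this.builder = builder;
      }

      void onPackageChanged() {
        components = null;          // cheap: defer the rebuild to the next request
      }

      List<T> get() {
        List<T> c = components;
        if (c == null) {
          c = builder.get();        // rebuild with the current package versions
          components = c;
        }
        return c;
      }
    }
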
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
index 2d6fdb1..d70cf99 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
@@ -48,8 +48,8 @@ import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrEventListener;
 import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.spelling.suggest.SolrSuggester;
 import org.apache.solr.spelling.suggest.SuggesterOptions;
@@ -88,9 +88,8 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
   @SuppressWarnings("unchecked")
   protected NamedList initParams;
 
-  protected SolrMetricManager metricManager;
-  protected String registryName;
-  
+  protected SolrMetricsContext metricsContext;
+
   /**
    * Key is the dictionary name used in SolrConfig, value is the corresponding {@link SolrSuggester}
    */
@@ -351,18 +350,22 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    this.registryName = registryName;
-    this.metricManager = manager;
-    registry = manager.registry(registryName);
-    manager.registerGauge(this, registryName, () -> ramBytesUsed(), tag, true, "totalSizeInBytes", getCategory().toString(), scope);
+  public SolrMetricsContext getSolrMetricsContext() {
+    return metricsContext;
+  }
+
+  @Override
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    this.metricsContext = parentContext.getChildContext(this);
+
+    this.metricsContext.gauge(this, () -> ramBytesUsed(), true, "totalSizeInBytes", getCategory().toString());
     MetricsMap suggestersMap = new MetricsMap((detailed, map) -> {
       for (Map.Entry<String, SolrSuggester> entry : suggesters.entrySet()) {
         SolrSuggester suggester = entry.getValue();
         map.put(entry.getKey(), suggester.toString());
       }
     });
-    manager.registerGauge(this, registryName, suggestersMap, tag, true, "suggesters", getCategory().toString(), scope);
+    this.metricsContext.gauge(this, suggestersMap, true, "suggesters", getCategory().toString(), scope);
   }
 
   @Override
diff --git a/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java b/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java
index f813748..8801b48 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java
@@ -50,6 +50,7 @@ import org.apache.solr.common.params.TermsParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.common.util.Utils;
 import org.apache.solr.handler.component.HttpShardHandlerFactory.WhitelistHostChecker;
 import org.apache.solr.request.SimpleFacets.CountPair;
 import org.apache.solr.schema.FieldType;
@@ -201,14 +202,14 @@ public class TermsComponent extends SearchComponent {
         SchemaField sf = rb.req.getSchema().getFieldOrNull(field);
         if (sf != null && sf.getType().isPointField()) {
           // FIXME: terms.ttf=true is not supported for pointFields
-          if (lowerStr!=null || upperStr!=null || prefix!=null || regexp!=null) {
+          if (lowerStr != null || upperStr != null || prefix != null || regexp != null) {
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                 String.format(Locale.ROOT, "The terms component does not support Points-based fields with sorting or with parameters %s,%s,%s,%s ", TermsParams.TERMS_LOWER, TermsParams.TERMS_UPPER, TermsParams.TERMS_PREFIX_STR, TermsParams.TERMS_REGEXP_STR));
           }
 
+          PointMerger.ValueIterator valueIterator = new PointMerger.ValueIterator(sf, rb.req.getSearcher().getRawReader().leaves());
+          MutableValue mv = valueIterator.getMutableValue();
           if (sort) {
-            PointMerger.ValueIterator valueIterator = new PointMerger.ValueIterator(sf, rb.req.getSearcher().getRawReader().leaves());
-            MutableValue mv = valueIterator.getMutableValue();
             BoundedTreeSet<CountPair<MutableValue, Integer>> queue = new BoundedTreeSet<>(limit);
 
             for (; ; ) {
@@ -221,7 +222,7 @@ public class TermsComponent extends SearchComponent {
             }
 
             for (CountPair<MutableValue, Integer> item : queue) {
-              fieldTerms.add(item.key.toString(), item.val);
+              fieldTerms.add(Utils.OBJECT_TO_STRING.apply(item.key.toObject()), item.val);
             }
             continue;
           } else {
@@ -229,28 +230,24 @@ public class TermsComponent extends SearchComponent {
             // streaming solution that is deferred until writing the response
             // TODO: we can't use the streaming solution until XML writer supports PushWriter!
             termsResult.add(field, (MapWriter) ew -> {
-              PointMerger.ValueIterator valueIterator = new PointMerger.ValueIterator(sf, rb.req.getSearcher().getRawReader().leaves());
-              MutableValue mv = valueIterator.getMutableValue();
               int num = 0;
               for(;;) {
                 long count = valueIterator.getNextCount();
                 if (count < 0) break;
                 if (count < freqmin || count > freqmax) continue;
                 if (++num > limit) break;
-                ew.put(mv.toString(), (int)count); // match the numeric type of terms
+                ew.put(Utils.OBJECT_TO_STRING.apply(mv.toObject()), (int)count); // match the numeric type of terms
               }
             });
              ***/
 
-            PointMerger.ValueIterator valueIterator = new PointMerger.ValueIterator(sf, rb.req.getSearcher().getRawReader().leaves());
-            MutableValue mv = valueIterator.getMutableValue();
             int num = 0;
             for(;;) {
               long count = valueIterator.getNextCount();
               if (count < 0) break;
               if (count < freqmin || count > freqmax) continue;
               if (++num > limit) break;
-              fieldTerms.add(mv.toString(), (int)count); // match the numeric type of terms
+              fieldTerms.add(Utils.OBJECT_TO_STRING.apply(mv.toObject()), (int)count); // match the numeric type of terms
             }
             continue;
           }
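
The TermsComponent change renders numeric point values by applying a shared
Object-to-String function to the boxed value (mv.toObject()) rather than
calling MutableValue.toString(), so terms are formatted from their actual
numeric type. The diff only shows that Utils.OBJECT_TO_STRING is applied to an
Object and yields a String; a stand-in with that shape:

    import java.util.function.Function;

    class ObjectToString {
      // assumption: mirrors the shape of Utils.OBJECT_TO_STRING as used above
      static final Function<Object, String> OBJECT_TO_STRING =
          o -> o == null ? null : o.toString();
    }
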
diff --git a/solr/core/src/java/org/apache/solr/highlight/HighlightingPluginBase.java b/solr/core/src/java/org/apache/solr/highlight/HighlightingPluginBase.java
index 5cbf123..8186b12 100644
--- a/solr/core/src/java/org/apache/solr/highlight/HighlightingPluginBase.java
+++ b/solr/core/src/java/org/apache/solr/highlight/HighlightingPluginBase.java
@@ -20,12 +20,11 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 
 import com.codahale.metrics.Counter;
-import com.codahale.metrics.MetricRegistry;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrInfoBean;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 
 /**
  * 
@@ -36,9 +35,7 @@ public abstract class HighlightingPluginBase implements SolrInfoBean, SolrMetric
   protected Counter numRequests;
   protected SolrParams defaults;
   protected Set<String> metricNames = ConcurrentHashMap.newKeySet(1);
-  protected MetricRegistry registry;
-  protected SolrMetricManager metricManager;
-  protected String registryName;
+  protected SolrMetricsContext solrMetricsContext;
 
   public void init(NamedList args) {
     if( args != null ) {
@@ -71,16 +68,14 @@ public abstract class HighlightingPluginBase implements SolrInfoBean, SolrMetric
   }
 
   @Override
-  public MetricRegistry getMetricRegistry() {
-    return registry;
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    this.registryName = registryName;
-    this.metricManager = manager;
-    registry = manager.registry(registryName);
-    numRequests = manager.counter(this, registryName, "requests", getCategory().toString(), scope);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    solrMetricsContext = parentContext.getChildContext(this);
+    numRequests = solrMetricsContext.counter(this, "requests", getCategory().toString(), scope);
   }
 }
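
For plugin authors, the visible API change in files like this one is confined to the
metrics initialization hook. As a quick orientation, the old and new entry points
(signatures as they appear in the diffs of this commit) are:

    // before: manager, registry name and life-cycle tag passed separately
    void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope);

    // after: a single SolrMetricsContext carries manager, registry and tag
    void initializeMetrics(SolrMetricsContext parentContext, String scope);

Subclasses that previously held a MetricRegistry or SolrMetricManager reference can
instead hold the child context, as HighlightingPluginBase now does.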
 
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
index c57a704..be52b5f 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
@@ -32,17 +32,16 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Responsible for collecting metrics from {@link SolrMetricProducer}'s
- * and exposing metrics to {@link SolrMetricReporter}'s.
+ * Helper class for managing registration of {@link SolrMetricProducer}'s
+ * and {@link SolrMetricReporter}'s specific to a {@link SolrCore} instance.
  */
 public class SolrCoreMetricManager implements Closeable {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final SolrCore core;
-  private final String tag;
-  private final SolrMetricManager metricManager;
-  private String registryName;
+  private SolrMetricsContext solrMetricsContext;
+  private SolrMetricManager metricManager;
   private String collectionName;
   private String shardName;
   private String replicaName;
@@ -56,10 +55,10 @@ public class SolrCoreMetricManager implements Closeable {
    */
   public SolrCoreMetricManager(SolrCore core) {
     this.core = core;
-    this.tag = core.getMetricTag();
-    this.metricManager = core.getCoreContainer().getMetricManager();
     initCloudMode();
-    registryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName());
+    metricManager = core.getCoreContainer().getMetricManager();
+    String registryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName());
+    solrMetricsContext = new SolrMetricsContext(metricManager, registryName, core.getMetricTag());
     leaderRegistryName = createLeaderRegistryName(cloudMode, collectionName, shardName);
   }
 
@@ -86,8 +85,8 @@ public class SolrCoreMetricManager implements Closeable {
     CoreContainer coreContainer = core.getCoreContainer();
     NodeConfig nodeConfig = coreContainer.getConfig();
     PluginInfo[] pluginInfos = nodeConfig.getMetricsConfig().getMetricReporters();
-    metricManager.loadReporters(pluginInfos, core.getResourceLoader(), coreContainer, core, tag,
-        SolrInfoBean.Group.core, registryName);
+    metricManager.loadReporters(pluginInfos, core.getResourceLoader(), coreContainer, core, solrMetricsContext.tag,
+        SolrInfoBean.Group.core, solrMetricsContext.registry);
     if (cloudMode) {
       metricManager.loadShardReporters(pluginInfos, core);
     }
@@ -99,19 +98,20 @@ public class SolrCoreMetricManager implements Closeable {
    * This method also reloads reporters so that they use the new core name.
    */
   public void afterCoreSetName() {
-    String oldRegistryName = registryName;
+    String oldRegistryName = solrMetricsContext.registry;
     String oldLeaderRegistryName = leaderRegistryName;
     initCloudMode();
-    registryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName());
+    String newRegistryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName());
     leaderRegistryName = createLeaderRegistryName(cloudMode, collectionName, shardName);
-    if (oldRegistryName.equals(registryName)) {
+    if (oldRegistryName.equals(newRegistryName)) {
       return;
     }
     // close old reporters
-    metricManager.closeReporters(oldRegistryName, tag);
+    metricManager.closeReporters(oldRegistryName, solrMetricsContext.tag);
     if (oldLeaderRegistryName != null) {
-      metricManager.closeReporters(oldLeaderRegistryName, tag);
+      metricManager.closeReporters(oldLeaderRegistryName, solrMetricsContext.tag);
     }
+    solrMetricsContext = new SolrMetricsContext(metricManager, newRegistryName, solrMetricsContext.tag);
     // load reporters again, using the new core name
     loadReporters();
   }
@@ -127,15 +127,16 @@ public class SolrCoreMetricManager implements Closeable {
       throw new IllegalArgumentException("registerMetricProducer() called with illegal arguments: " +
           "scope = " + scope + ", producer = " + producer);
     }
-    producer.initializeMetrics(metricManager, getRegistryName(), tag, scope);
+    // producers are initialized via the new SolrMetricsContext-based API; remove remaining back-compat shims in 9.0
+    producer.initializeMetrics(solrMetricsContext, scope);
   }
 
   /**
    * Return the registry used by this SolrCore.
    */
   public MetricRegistry getRegistry() {
-    if (registryName != null) {
-      return metricManager.registry(registryName);
+    if (solrMetricsContext != null) {
+      return solrMetricsContext.getMetricRegistry();
     } else {
       return null;
     }
@@ -146,11 +147,15 @@ public class SolrCoreMetricManager implements Closeable {
    */
   @Override
   public void close() throws IOException {
-    metricManager.closeReporters(getRegistryName(), tag);
+    metricManager.closeReporters(solrMetricsContext.registry, solrMetricsContext.tag);
     if (getLeaderRegistryName() != null) {
-      metricManager.closeReporters(getLeaderRegistryName(), tag);
+      metricManager.closeReporters(getLeaderRegistryName(), solrMetricsContext.tag);
     }
-    metricManager.unregisterGauges(getRegistryName(), tag);
+    metricManager.unregisterGauges(solrMetricsContext.registry, solrMetricsContext.tag);
+  }
+
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
   }
 
   public SolrCore getCore() {
@@ -175,7 +180,7 @@ public class SolrCoreMetricManager implements Closeable {
    * @return the metric registry name of the manager.
    */
   public String getRegistryName() {
-    return registryName;
+    return solrMetricsContext != null ? solrMetricsContext.registry : null;
   }
 
   /**
@@ -190,7 +195,7 @@ public class SolrCoreMetricManager implements Closeable {
    * Return a tag specific to this instance.
    */
   public String getTag() {
-    return tag;
+    return solrMetricsContext.tag;
   }
 
   public static String createRegistryName(boolean cloud, String collectionName, String shardName, String replicaName, String coreName) {
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
index 7d2877d..977b0ca 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
@@ -724,20 +724,24 @@ public class SolrMetricManager {
     registerMetric(info, registry, new GaugeWrapper(gauge, tag), force, metricName, metricPath);
   }
 
-  public int unregisterGauges(String registryName, String tag) {
-    if (tag == null) {
+  public int unregisterGauges(String registryName, String tagSegment) {
+    if (tagSegment == null) {
       return 0;
     }
     MetricRegistry registry = registry(registryName);
+    if (registry == null) return 0;
     AtomicInteger removed = new AtomicInteger();
     registry.removeMatching((name, metric) -> {
-      if (metric instanceof GaugeWrapper &&
-          tag.equals(((GaugeWrapper) metric).getTag())) {
-        removed.incrementAndGet();
-        return true;
-      } else {
-        return false;
+      if (metric instanceof GaugeWrapper) {
+        GaugeWrapper wrapper = (GaugeWrapper) metric;
+        boolean toRemove = wrapper.getTag().contains(tagSegment);
+        if (toRemove) {
+          removed.incrementAndGet();
+        }
+        return toRemove;
       }
+      return false;
     });
     return removed.get();
   }
@@ -752,10 +756,16 @@ public class SolrMetricManager {
    * segments prepended to the name.
    */
   public static String mkName(String name, String... path) {
+    return makeName(path == null || path.length == 0 ? Collections.emptyList() : Arrays.asList(path),
+        name);
+  }
+
+  public static String makeName(List<String> path, String name) {
     if (name == null || name.isEmpty()) {
       throw new IllegalArgumentException("name must not be empty");
     }
-    if (path == null || path.length == 0) {
+    if (path == null || path.size() == 0) {
       return name;
     } else {
       StringBuilder sb = new StringBuilder();
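
The new makeName(List, name) overload is the List-based twin of the varargs
mkName(name, path...); both join the path segments and the final name with dots.
A small illustrative example (the values are made up):

    import java.util.Arrays;

    // both calls produce "QUERY./select.requests"
    String a = SolrMetricManager.mkName("requests", "QUERY", "/select");
    String b = SolrMetricManager.makeName(Arrays.asList("QUERY", "/select"), "requests");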
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java
index 265d7e4..6479b99 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java
@@ -19,17 +19,59 @@ package org.apache.solr.metrics;
 /**
  * Used by objects that expose metrics through {@link SolrMetricManager}.
  */
-public interface SolrMetricProducer {
+public interface SolrMetricProducer extends AutoCloseable {
 
   /**
-   * Initializes metrics specific to this producer
-   * @param manager an instance of {@link SolrMetricManager}
-   * @param registry registry name where metrics are registered
-   * @param tag a symbolic tag that represents this instance of the producer,
-   * or a group of related instances that have the same life-cycle. This tag is
-   * used when managing life-cycle of some metrics and is set when
-   * {@link #initializeMetrics(SolrMetricManager, String, String, String)} is called.
-   * @param scope scope of the metrics (eg. handler name) to separate metrics of
+   * A unique metric tag identifies components with the same life-cycle, which should
+   * be registered / unregistered together. The tag has the format A:B:C, where
+   * A is the parent of B, and B is the parent of C, and so on.
+   * If object "B" is unregistered, "C" must also be unregistered.
+   * If object "A" is unregistered, "B" and "C" must also be unregistered.
+   * @param o object to create a tag for
+   * @param parentName parent object name, or null if no parent exists
    */
-  void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope);
+  static String getUniqueMetricTag(Object o, String parentName) {
+    String name = o.getClass().getSimpleName() + "@" + Integer.toHexString(o.hashCode());
+    if (parentName != null && parentName.contains(name)) {
+      throw new RuntimeException("Parent already includes this component! parent=" + parentName + ", this=" + name);
+    }
+    return parentName == null ?
+        name :
+        parentName + ":" + name;
+  }
+
+  /**
+   * Initialize metrics specific to this producer.
+   * @param parentContext parent metrics context. If this component has the same life-cycle as the parent,
+   *                it can simply use the parent context; otherwise it should obtain a child context
+   *                using {@link SolrMetricsContext#getChildContext(Object)}, passing <code>this</code>
+   *                as the child object.
+   * @param scope component scope
+   */
+  void initializeMetrics(SolrMetricsContext parentContext, String scope);
+
+  /**
+   * Implementations should override this method to return the context used in
+   * {@link #initializeMetrics(SolrMetricsContext, String)} to ensure proper cleanup of metrics
+   * at the end of the life-cycle of this component. This should be the child context if one was created.
+   * <p>The default implementation returns null, which disables unregistration of metrics on {@link #close()}.</p>
+   */
+  default SolrMetricsContext getSolrMetricsContext() {
+    return null;
+  }
+
+  /**
+   * Implementations should always call <code>SolrMetricProducer.super.close()</code> to ensure that
+   * metrics with the same life-cycle as this component are properly unregistered. This prevents
+   * obscure memory leaks.
+   */
+  @Override
+  default void close() throws Exception {
+    SolrMetricsContext context = getSolrMetricsContext();
+    if (context == null) {
+      return;
+    } else {
+      context.unregister();
+    }
+  }
 }
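
To make the new contract concrete, here is a minimal sketch of a producer written
against the interface above. The class name and metric name are illustrative, and the
SolrInfoBean methods are stubbed only to keep the sketch self-contained:

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;
    import com.codahale.metrics.Counter;
    import org.apache.solr.core.SolrInfoBean;
    import org.apache.solr.metrics.SolrMetricProducer;
    import org.apache.solr.metrics.SolrMetricsContext;

    public class MyPlugin implements SolrInfoBean, SolrMetricProducer {
      private SolrMetricsContext solrMetricsContext;
      private Counter numRequests;
      private final Set<String> metricNames = ConcurrentHashMap.newKeySet();

      @Override
      public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
        // a child context ties these metrics to this instance's life-cycle
        solrMetricsContext = parentContext.getChildContext(this);
        numRequests = solrMetricsContext.counter(this, "requests", getCategory().toString(), scope);
      }

      @Override
      public SolrMetricsContext getSolrMetricsContext() {
        return solrMetricsContext; // lets the default close() unregister our gauges
      }

      @Override public String getName() { return "myPlugin"; }
      @Override public String getDescription() { return "illustrative metric producer"; }
      @Override public Category getCategory() { return Category.OTHER; }
      @Override public Set<String> getMetricNames() { return metricNames; }
    }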
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricsContext.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricsContext.java
new file mode 100644
index 0000000..dd37e1f
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricsContext.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.metrics;
+
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Meter;
+import com.codahale.metrics.MetricRegistry;
+import com.codahale.metrics.Timer;
+import org.apache.solr.core.SolrInfoBean;
+
+/**
+ * This class represents a metrics context that ties together components with the same life-cycle
+ * and provides convenient access to the metric registry.
+ */
+public class SolrMetricsContext {
+  public final String registry;
+  public final SolrMetricManager metricManager;
+  public final String tag;
+
+  public SolrMetricsContext(SolrMetricManager metricManager, String registry, String tag) {
+    this.registry = registry;
+    this.metricManager = metricManager;
+    this.tag = tag;
+  }
+
+  /**
+   * Metrics tag that represents objects with the same life-cycle.
+   */
+  public String getTag() {
+    return tag;
+  }
+
+  /**
+   * Unregister all {@link Gauge} metrics that use this context's tag.
+   *
+   * <p><b>NOTE: This method MUST be called at the end of a life-cycle (typically in <code>close()</code>)
+   * of components that register gauge metrics with references to the current object's instance. Failure to
+   * do so may result in hard-to-debug memory leaks.</b></p>
+   */
+  public void unregister() {
+    metricManager.unregisterGauges(registry, tag);
+  }
+
+  /**
+   * Get a context with the same registry name but a tag that represents a parent-child relationship.
+   * Because the tag differs from the parent context's tag, the parent and the child are assumed
+   * to have different life-cycles.
+   * @param child child object that produces metrics with a different life-cycle than the parent.
+   */
+  public SolrMetricsContext getChildContext(Object child) {
+    SolrMetricsContext childContext = new SolrMetricsContext(metricManager, registry, SolrMetricProducer.getUniqueMetricTag(child, tag));
+    return childContext;
+  }
+
+  /**
+   * Convenience method for {@link SolrMetricManager#meter(SolrInfoBean, String, String, String...)}.
+   */
+  public Meter meter(SolrInfoBean info, String metricName, String... metricPath) {
+    return metricManager.meter(info, registry, metricName, metricPath);
+  }
+
+  /**
+   * Convenience method for {@link SolrMetricManager#counter(SolrInfoBean, String, String, String...)}.
+   */
+  public Counter counter(SolrInfoBean info, String metricName, String... metricPath) {
+    return metricManager.counter(info, registry, metricName, metricPath);
+  }
+
+  /**
+   * Convenience method for {@link SolrMetricManager#registerGauge(SolrInfoBean, String, Gauge, String, boolean, String, String...)}.
+   */
+  public void gauge(SolrInfoBean info, Gauge<?> gauge, boolean force, String metricName, String... metricPath) {
+    metricManager.registerGauge(info, registry, gauge, tag, force, metricName, metricPath);
+  }
+
+  /**
+   * Convenience method for {@link SolrMetricManager#timer(SolrInfoBean, String, String, String...)}.
+   */
+  public Timer timer(SolrInfoBean info, String metricName, String... metricPath) {
+    return metricManager.timer(info, registry, metricName, metricPath);
+  }
+
+  /**
+   * Convenience method for {@link SolrMetricManager#histogram(SolrInfoBean, String, String, String...)}.
+   */
+  public Histogram histogram(SolrInfoBean info, String metricName, String... metricPath) {
+    return metricManager.histogram(info, registry, metricName, metricPath);
+  }
+
+  /**
+   * Get the MetricRegistry instance that is used for registering metrics in this context.
+   */
+  public MetricRegistry getMetricRegistry() {
+    return metricManager.registry(registry);
+  }
+}
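
A short usage sketch tying the pieces together; every name here is hypothetical, and
the SolrMetricManager, registry name and SolrInfoBean are assumed to come from the
surrounding component:

    static void example(SolrMetricManager metricManager, String registryName,
                        SolrInfoBean info, Object childComponent) {
      SolrMetricsContext parent = new SolrMetricsContext(metricManager, registryName, "Parent@cafe01");
      // the child tag becomes "Parent@cafe01:<ChildClass>@<hash>" via getUniqueMetricTag()
      SolrMetricsContext child = parent.getChildContext(childComponent);
      child.gauge(info, () -> 42, true, "answer", "CACHE");
      // unregisterGauges() matches tag segments (String.contains), so unregistering
      // with the parent tag also removes gauges registered under child tags
      parent.unregister();
    }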
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
index f9084b4..d38a486 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
@@ -32,17 +32,18 @@ import org.apache.solr.api.Command;
 import org.apache.solr.api.EndPoint;
 import org.apache.solr.api.PayloadObj;
 import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.request.beans.Package;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.cloud.ZooKeeperException;
 import org.apache.solr.common.util.CommandOperation;
+import org.apache.solr.common.util.ReflectMapWriter;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.filestore.PackageStoreAPI;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.util.ReflectMapWriter;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.WatchedEvent;
 import org.apache.zookeeper.Watcher;
@@ -175,9 +176,6 @@ public class PackageAPI {
     @JsonProperty
     public String manifestSHA512;
 
-    public PkgVersion() {
-    }
-
     public PkgVersion(AddVersion addVersion) {
       this.version = addVersion.version;
       this.files = addVersion.files;
@@ -185,6 +183,14 @@ public class PackageAPI {
       this.manifestSHA512 = addVersion.manifestSHA512; // nocommit: compute and populate here
     }
 
+    public PkgVersion() {
+    }
+
+    public PkgVersion(Package.AddVersion addVersion) {
+      this.version = addVersion.version;
+      this.files = addVersion.files;
+    }
+
 
     @Override
     public boolean equals(Object obj) {
@@ -228,9 +234,9 @@ public class PackageAPI {
 
 
     @Command(name = "add")
-    public void add(SolrQueryRequest req, SolrQueryResponse rsp, PayloadObj<AddVersion> payload) {
+    public void add(SolrQueryRequest req, SolrQueryResponse rsp, PayloadObj<Package.AddVersion> payload) {
       if (!checkEnabled(payload)) return;
-      AddVersion add = payload.get();
+      Package.AddVersion add = payload.get();
       if (add.files.isEmpty()) {
         payload.addError("No files specified");
         return;
@@ -268,9 +274,9 @@ public class PackageAPI {
     }
 
     @Command(name = "delete")
-    public void del(SolrQueryRequest req, SolrQueryResponse rsp, PayloadObj<DelVersion> payload) {
+    public void del(SolrQueryRequest req, SolrQueryResponse rsp, PayloadObj<Package.DelVersion> payload) {
       if (!checkEnabled(payload)) return;
-      DelVersion delVersion = payload.get();
+      Package.DelVersion delVersion = payload.get();
       try {
         coreContainer.getZkController().getZkClient().atomicUpdate(SOLR_PKGS_PATH, (stat, bytes) -> {
           Packages packages = null;
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java b/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java
index 3ccd431..0287f5e 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java
@@ -77,7 +77,8 @@ public class PackageListeners {
   private synchronized void invokeListeners(PackageLoader.Package pkg) {
     for (Reference<Listener> ref : listeners) {
       Listener listener = ref.get();
-      if (listener != null && listener.packageName().equals(pkg.name())) {
+      if(listener == null) continue;
+      if (listener.packageName() == null || listener.packageName().equals(pkg.name())) {
         listener.changed(pkg);
       }
     }
@@ -96,6 +97,8 @@ public class PackageListeners {
 
 
   public interface Listener {
+    /** Name of the package, or null to listen to all package changes.
+     */
     String packageName();
 
     PluginInfo pluginInfo();
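
With this change, a listener whose packageName() returns null acts as a wildcard. The
dispatch condition in invokeListeners() reduces to:

    // notify the listener when it is a wildcard or the package names match
    boolean notify = listener.packageName() == null
        || listener.packageName().equals(pkg.name());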
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java b/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
index c86e433..e82a071 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
@@ -17,6 +17,7 @@
 
 package org.apache.solr.pkg;
 
+import java.io.Closeable;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.net.MalformedURLException;
@@ -35,6 +36,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
 
 import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrResourceLoader;
@@ -44,7 +46,7 @@ import org.slf4j.LoggerFactory;
 /**
  * The class that holds a mapping of various packages and classloaders
  */
-public class PackageLoader {
+public class PackageLoader implements AutoCloseable {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final CoreContainer coreContainer;
@@ -94,6 +96,7 @@ public class PackageLoader {
           //other classes may be holding a reference to this object;
           // they should know that it has been removed
           p.markDeleted();
+          IOUtils.closeQuietly((Closeable) p);
         }
       }
     }
@@ -130,7 +133,7 @@ public class PackageLoader {
 
   public void notifyListeners(String pkg) {
     Package p = packageClassLoaders.get(pkg);
-    if(p != null){
+    if (p != null) {
       List<Package> l = Collections.singletonList(p);
       for (SolrCore core : coreContainer.getCores()) {
         core.getPackageListeners().packagesUpdated(l);
@@ -141,7 +144,7 @@ public class PackageLoader {
   /**
    * represents a package definition in the packages.json
    */
-  public class Package {
+  public class Package implements AutoCloseable {
     final String name;
     final Map<String, Version> myVersions = new ConcurrentHashMap<>();
     private List<String> sortedVersions = new CopyOnWriteArrayList<>();
@@ -176,7 +179,10 @@ public class PackageLoader {
         if (!newVersions.contains(s)) {
           log.info("version: {} is removed from package: {}", s, this.name);
           sortedVersions.remove(s);
-          myVersions.remove(s);
+          Version removed = myVersions.remove(s);
+          if (removed != null) {
+            IOUtils.closeQuietly((Closeable) removed);
+          }
         }
       }
 
@@ -219,7 +225,12 @@ public class PackageLoader {
 
     }
 
-    public class Version implements MapWriter {
+    @Override
+    public void close() throws Exception {
+      for (Version v : myVersions.values()) v.close();
+    }
+
+    public class Version implements MapWriter, AutoCloseable {
       private final Package parent;
       private SolrResourceLoader loader;
 
@@ -261,6 +272,13 @@ public class PackageLoader {
       public SolrResourceLoader getLoader() {
         return loader;
       }
+
+      @Override
+      public void close() throws Exception {
+        if (loader != null) {
+          loader.close();
+        }
+      }
     }
   }
 
@@ -273,4 +291,9 @@ public class PackageLoader {
     }
     return latest;
   }
+
+  @Override
+  public void close() throws Exception {
+    for (Package p : packageClassLoaders.values()) p.close();
+  }
 }
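
PackageLoader, Package and Version are now AutoCloseable, and close() cascades from
the loader down to each version's SolrResourceLoader. A hedged sketch of the resulting
life-cycle (getPackageLoader() is assumed here purely for illustration):

    // closing the loader closes every Package, which closes every Version,
    // which in turn closes its SolrResourceLoader
    try (PackageLoader loader = coreContainer.getPackageLoader()) {
      // ... resolve plugin classes from the package class loaders ...
    }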
diff --git a/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java b/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java
index e1ddfcf..4925a41 100644
--- a/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java
+++ b/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java
@@ -264,7 +264,7 @@ public class RequestUtil {
       List<String> path = StrUtils.splitSmart(queryParamName, ".", true);
       path = path.subList(1, path.size());
       for (String jsonStr : vals) {
-        Object o = ObjectBuilder.fromJSON(jsonStr);
+        Object o = ObjectBuilder.fromJSONStrict(jsonStr);
         // zero-length strings or comments can cause this to be null (and a zero-length string can result from a json content-type w/o a body)
         if (o != null) {
           ObjectUtil.mergeObjects(json, path, o, handler);
diff --git a/solr/core/src/java/org/apache/solr/schema/JsonPreAnalyzedParser.java b/solr/core/src/java/org/apache/solr/schema/JsonPreAnalyzedParser.java
index 5616a2f..0e83cd8 100644
--- a/solr/core/src/java/org/apache/solr/schema/JsonPreAnalyzedParser.java
+++ b/solr/core/src/java/org/apache/solr/schema/JsonPreAnalyzedParser.java
@@ -81,7 +81,7 @@ public class JsonPreAnalyzedParser implements PreAnalyzedParser {
     if (val.length() == 0) {
       return res;
     }
-    Object o = ObjectBuilder.fromJSON(val);
+    Object o = ObjectBuilder.fromJSONStrict(val);
     if (!(o instanceof Map)) {
       throw new IOException("Invalid JSON type " + o.getClass().getName() + ", expected Map");
     }
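
Both call sites above switch from ObjectBuilder.fromJSON to fromJSONStrict which, as
the name suggests, rejects input that is not a single well-formed JSON value instead
of silently ignoring trailing content (assuming noggit's strict-parse semantics):

    Object ok  = ObjectBuilder.fromJSONStrict("{\"a\":1}");      // parses fine
    Object bad = ObjectBuilder.fromJSONStrict("{\"a\":1} junk"); // throws a parse exception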
diff --git a/solr/core/src/java/org/apache/solr/search/CaffeineCache.java b/solr/core/src/java/org/apache/solr/search/CaffeineCache.java
index 71eb86f..f3c4c66 100644
--- a/solr/core/src/java/org/apache/solr/search/CaffeineCache.java
+++ b/solr/core/src/java/org/apache/solr/search/CaffeineCache.java
@@ -31,14 +31,13 @@ import java.util.concurrent.ForkJoinPool;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.LongAdder;
 
-import com.codahale.metrics.MetricRegistry;
 import com.github.benmanes.caffeine.cache.RemovalCause;
 import com.github.benmanes.caffeine.cache.RemovalListener;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -88,7 +87,7 @@ public class CaffeineCache<K, V> extends SolrCacheBase implements SolrCache<K, V
 
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
   private MetricsMap cacheMap;
-  private MetricRegistry registry;
+  private SolrMetricsContext solrMetricsContext;
 
   private long initialRamBytes = 0;
   private final LongAdder ramBytes = new LongAdder();
@@ -202,7 +201,8 @@ public class CaffeineCache<K, V> extends SolrCacheBase implements SolrCache<K, V
   }
 
   @Override
-  public void close() {
+  public void close() throws Exception {
+    SolrCache.super.close();
     cache.invalidateAll();
     cache.cleanUp();
     if (executor instanceof ExecutorService) {
@@ -322,8 +322,8 @@ public class CaffeineCache<K, V> extends SolrCacheBase implements SolrCache<K, V
   }
 
   @Override
-  public MetricRegistry getMetricRegistry() {
-    return registry;
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
   }
 
   @Override
@@ -337,8 +337,8 @@ public class CaffeineCache<K, V> extends SolrCacheBase implements SolrCache<K, V
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    registry = manager.registry(registryName);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    solrMetricsContext = parentContext.getChildContext(this);
     cacheMap = new MetricsMap((detailed, map) -> {
       if (cache != null) {
         CacheStats stats = cache.stats();
@@ -362,6 +362,6 @@ public class CaffeineCache<K, V> extends SolrCacheBase implements SolrCache<K, V
         map.put("cumulative_evictions", cumulativeStats.evictionCount());
       }
     });
-    manager.registerGauge(this, registryName, cacheMap, tag, true, scope, getCategory().toString());
+    solrMetricsContext.gauge(this, cacheMap, true, scope, getCategory().toString());
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/search/FastLRUCache.java b/solr/core/src/java/org/apache/solr/search/FastLRUCache.java
index 2dc1c1e..b74b63f 100644
--- a/solr/core/src/java/org/apache/solr/search/FastLRUCache.java
+++ b/solr/core/src/java/org/apache/solr/search/FastLRUCache.java
@@ -16,24 +16,23 @@
  */
 package org.apache.solr.search;
 
-import com.codahale.metrics.MetricRegistry;
+import java.lang.invoke.MethodHandles;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.util.ConcurrentLRUCache;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.lang.invoke.MethodHandles;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.TimeUnit;
-
 /**
  * SolrCache based on ConcurrentLRUCache implementation.
  * <p>
@@ -42,12 +41,11 @@ import java.util.concurrent.TimeUnit;
  * <p>
  * Also see <a href="http://wiki.apache.org/solr/SolrCaching">SolrCaching</a>
  *
- *
  * @see org.apache.solr.util.ConcurrentLRUCache
  * @see org.apache.solr.search.SolrCache
  * @since solr 1.4
  */
-public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>, Accountable {
+public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K, V>, Accountable {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(FastLRUCache.class);
@@ -61,7 +59,7 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
   private long warmupTime = 0;
 
   private String description = "Concurrent LRU Cache";
-  private ConcurrentLRUCache<K,V> cache;
+  private ConcurrentLRUCache<K, V> cache;
   private int showItems = 0;
 
   private long maxRamBytes;
@@ -75,7 +73,7 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
 
   private MetricsMap cacheMap;
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
-  private MetricRegistry registry;
+  private SolrMetricsContext solrMetricsContext;
 
   @Override
   public Object init(Map args, Object persistence, CacheRegenerator regenerator) {
@@ -117,7 +115,7 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
     str = (String) args.get(MAX_RAM_MB_PARAM);
     long maxRamMB = str == null ? -1 : (long) Double.parseDouble(str);
     this.maxRamBytes = maxRamMB < 0 ? Long.MAX_VALUE : maxRamMB * 1024L * 1024L;
-    if (maxRamBytes != Long.MAX_VALUE)  {
+    if (maxRamBytes != Long.MAX_VALUE) {
       ramLowerWatermark = Math.round(maxRamBytes * 0.8);
       description = generateDescription(maxRamBytes, ramLowerWatermark, cleanupThread);
       cache = new ConcurrentLRUCache<>(ramLowerWatermark, maxRamBytes, cleanupThread, null, maxIdleTimeSec);
@@ -213,7 +211,7 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
    */
   protected String generateDescription(int limit, int initialSize, int minLimit, int acceptableLimit, boolean newThread) {
     String description = "Concurrent LRU Cache(maxSize=" + limit + ", initialSize=" + initialSize +
-        ", minSize="+minLimit + ", acceptableSize="+acceptableLimit+", cleanupThread="+newThread;
+        ", minSize=" + minLimit + ", acceptableSize=" + acceptableLimit + ", cleanupThread=" + newThread;
     if (isAutowarmingOn()) {
       description += ", " + getAutowarmDescription();
     }
@@ -274,10 +272,9 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
       for (int i = itemsArr.length - 1; i >= 0; i--) {
         try {
           boolean continueRegen = regenerator.regenerateItem(searcher,
-                  this, old, itemsArr[i].getKey(), itemsArr[i].getValue());
+              this, old, itemsArr[i].getKey(), itemsArr[i].getValue());
           if (!continueRegen) break;
-        }
-        catch (Exception e) {
+        } catch (Exception e) {
           SolrException.log(log, "Error during auto-warming of key:" + itemsArr[i].getKey(), e);
         }
       }
@@ -287,7 +284,8 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
 
 
   @Override
-  public void close() {
+  public void close() throws Exception {
+    SolrCache.super.close();
     // add the stats to the cumulative stats object (the first in the statsList)
     statsList.get(0).add(cache.getStats());
     statsList.remove(cache.getStats());
@@ -310,10 +308,16 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
     return metricNames;
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    registry = manager.registry(registryName);
-    manager.registerGauge(this, registryName, cacheMap, tag, true, scope, getCategory().toString());
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
+  }
+
+  @Override
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    this.solrMetricsContext = parentContext.getChildContext(this);
+    this.solrMetricsContext.gauge(this, cacheMap, true, scope, getCategory().toString());
   }
 
   // for unit tests only
@@ -322,11 +326,6 @@ public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>,
   }
 
   @Override
-  public MetricRegistry getMetricRegistry() {
-    return registry;
-  }
-
-  @Override
   public String toString() {
     return name() + (cacheMap != null ? cacheMap.getValue().toString() : "");
   }
diff --git a/solr/core/src/java/org/apache/solr/search/LFUCache.java b/solr/core/src/java/org/apache/solr/search/LFUCache.java
index 20cf664..125f08a 100644
--- a/solr/core/src/java/org/apache/solr/search/LFUCache.java
+++ b/solr/core/src/java/org/apache/solr/search/LFUCache.java
@@ -17,19 +17,18 @@
 package org.apache.solr.search;
 
 import java.lang.invoke.MethodHandles;
-import java.util.concurrent.ConcurrentHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.TimeUnit;
 
-import com.codahale.metrics.MetricRegistry;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.util.ConcurrentLFUCache;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -79,7 +78,8 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
   private int maxIdleTimeSec;
   private MetricsMap cacheMap;
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
-  private MetricRegistry registry;
+  private SolrMetricsContext solrMetricsContext;
 
   private int maxSize;
   private int minSizeLimit;
@@ -230,7 +230,8 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
 
 
   @Override
-  public void close() {
+  public void close() throws Exception {
+    SolrCache.super.close();
     // add the stats to the cumulative stats object (the first in the statsList)
     statsList.get(0).add(cache.getStats());
     statsList.remove(cache.getStats());
@@ -263,8 +264,13 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    registry = manager.registry(registryName);
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
+  }
+
+  @Override
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    solrMetricsContext = parentContext.getChildContext(this);
     cacheMap = new MetricsMap((detailed, map) -> {
       if (cache != null) {
         ConcurrentLFUCache.Stats stats = cache.getStats();
@@ -330,7 +336,7 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
 
       }
     });
-    manager.registerGauge(this, registryName, cacheMap, tag, true, scope, getCategory().toString());
+    solrMetricsContext.gauge(this, cacheMap, true, scope, getCategory().toString());
   }
 
   // for unit tests only
@@ -344,11 +350,6 @@ public class LFUCache<K, V> implements SolrCache<K, V>, Accountable {
   }
 
   @Override
-  public MetricRegistry getMetricRegistry() {
-    return registry;
-  }
-
-  @Override
   public String toString() {
     return name + (cacheMap != null ? cacheMap.getValue().toString() : "");
   }
diff --git a/solr/core/src/java/org/apache/solr/search/LRUCache.java b/solr/core/src/java/org/apache/solr/search/LRUCache.java
index c733c07..7a1b37c 100644
--- a/solr/core/src/java/org/apache/solr/search/LRUCache.java
+++ b/solr/core/src/java/org/apache/solr/search/LRUCache.java
@@ -18,22 +18,21 @@ package org.apache.solr.search;
 
 import java.lang.invoke.MethodHandles;
 import java.util.Collection;
-import java.util.concurrent.ConcurrentHashMap;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.LongAdder;
 
-import com.codahale.metrics.MetricRegistry;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.Accountables;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.TimeSource;
 import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -77,7 +76,7 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
   private String description="LRU Cache";
   private MetricsMap cacheMap;
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
-  private MetricRegistry registry;
+  private SolrMetricsContext solrMetricsContext;
   private int maxSize;
   private int initialSize;
 
@@ -234,8 +233,8 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
   }
 
   /**
-   * 
-   * @return Returns the description of this cache. 
+   *
+   * @return Returns the description of this cache.
    */
   private String generateDescription() {
     String description = "LRU Cache(maxSize=" + getMaxSize() + ", initialSize=" + initialSize;
@@ -341,9 +340,9 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
 
       // Don't do the autowarming in the synchronized block, just pull out the keys and values.
       synchronized (other.map) {
-        
+
         int sz = autowarm.getWarmCount(other.map.size());
-        
+
         keys = new Object[sz];
         vals = new Object[sz];
 
@@ -378,12 +377,6 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
     warmupTime = TimeUnit.MILLISECONDS.convert(System.nanoTime() - warmingStartTime, TimeUnit.NANOSECONDS);
   }
 
-  @Override
-  public void close() {
-
-  }
-
-
   //////////////////////// SolrInfoMBeans methods //////////////////////
 
 
@@ -403,8 +396,13 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    registry = manager.registry(registryName);
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
+  }
+
+  @Override
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    solrMetricsContext = parentContext.getChildContext(this);
     cacheMap = new MetricsMap((detailed, res) -> {
       synchronized (map) {
         res.put(LOOKUPS_PARAM, lookups);
@@ -433,7 +431,7 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
       res.put("cumulative_evictionsRamUsage", stats.evictionsRamUsage.longValue());
       res.put("cumulative_evictionsIdleTime", stats.evictionsIdleTime.longValue());
     });
-    manager.registerGauge(this, registryName, cacheMap, tag, true, scope, getCategory().toString());
+    solrMetricsContext.gauge(this, cacheMap, true, scope, getCategory().toString());
   }
 
   // for unit tests only
@@ -442,11 +440,6 @@ public class LRUCache<K,V> extends SolrCacheBase implements SolrCache<K,V>, Acco
   }
 
   @Override
-  public MetricRegistry getMetricRegistry() {
-    return registry;
-  }
-
-  @Override
   public String toString() {
     return name() + (cacheMap != null ? cacheMap.getValue().toString() : "");
   }
diff --git a/solr/core/src/java/org/apache/solr/search/PointMerger.java b/solr/core/src/java/org/apache/solr/search/PointMerger.java
index 59a6fb3..2dcb552 100644
--- a/solr/core/src/java/org/apache/solr/search/PointMerger.java
+++ b/solr/core/src/java/org/apache/solr/search/PointMerger.java
@@ -57,6 +57,9 @@ public class PointMerger {
     public ValueIterator(SchemaField field, List<LeafReaderContext> readers, int totalBufferSize, int minSegBufferSize) throws IOException {
       assert field.getType().isPointField();
       queue = new PQueue(readers.size());
+      if (readers.isEmpty()) {
+        return;
+      }
       long ndocs = readers.get(readers.size()-1).docBase + readers.get(readers.size()-1).reader().maxDoc();
       for (LeafReaderContext ctx : readers) {
         PointValues pv = ctx.reader().getPointValues(field.getName());
@@ -80,6 +83,7 @@ public class PointMerger {
             seg = new DoubleSeg(pv, capacity);
             break;
           case DATE:
+            seg = new DateSeg(pv, capacity);
             break;
         }
         int count = seg.setNextValue();
diff --git a/solr/core/src/java/org/apache/solr/search/SolrCache.java b/solr/core/src/java/org/apache/solr/search/SolrCache.java
index 4a16b39..55f57ec 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrCache.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrCache.java
@@ -137,7 +137,9 @@ public interface SolrCache<K,V> extends SolrInfoBean, SolrMetricProducer {
 
 
   /** Frees any non-memory resources */
-  public void close();
+  default void close() throws Exception {
+    SolrMetricProducer.super.close();
+  }
 
   /** Returns maximum size limit (number of items) if set and supported, -1 otherwise. */
   int getMaxSize();
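
SolrCache.close() is now a default method delegating to SolrMetricProducer.super.close(),
which unregisters the cache's gauges through its metrics context. Implementations that
override it follow the pattern used by the cache classes in this commit (an excerpt,
not a complete class):

    @Override
    public void close() throws Exception {
      SolrCache.super.close();   // unregister this cache's gauges first
      // ... then release implementation-specific resources (maps, executors, etc.) ...
    }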
diff --git a/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java b/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java
index 66b8ab1..7afe96d 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java
@@ -22,8 +22,7 @@ import java.util.Map;
 import java.util.Set;
 
 import com.codahale.metrics.MetricRegistry;
-import org.apache.solr.common.util.Utils;
-import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -77,7 +76,7 @@ public class SolrCacheHolder<K, V> implements SolrCache<K,V> {
     return delegate;
   }
 
-  public void close() {
+  public void close() throws Exception {
     delegate.close();
   }
 
@@ -142,11 +141,8 @@ public class SolrCacheHolder<K, V> implements SolrCache<K,V> {
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
-    log.debug("Going to register cachemetrics " + Utils.toJSONString(factory));
-
-    delegate.initializeMetrics(manager, registry, tag,scope);
-
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    delegate.initializeMetrics(parentContext, scope);
   }
 
 }
diff --git a/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java b/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java
index b2647cd..b6deb7c 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java
@@ -19,11 +19,10 @@ package org.apache.solr.search;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 
-import com.codahale.metrics.MetricRegistry;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.uninverting.UninvertingReader;
 
 /**
@@ -35,7 +34,7 @@ public class SolrFieldCacheBean implements SolrInfoBean, SolrMetricProducer {
   private boolean disableEntryList = Boolean.getBoolean("disableSolrFieldCacheMBeanEntryList");
   private boolean disableJmxEntryList = Boolean.getBoolean("disableSolrFieldCacheMBeanEntryListJmx");
 
-  private MetricRegistry registry;
+  private SolrMetricsContext solrMetricsContext;
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
 
   @Override
@@ -50,14 +49,15 @@ public class SolrFieldCacheBean implements SolrInfoBean, SolrMetricProducer {
   public Set<String> getMetricNames() {
     return metricNames;
   }
+
   @Override
-  public MetricRegistry getMetricRegistry() {
-    return registry;
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    registry = manager.registry(registryName);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    this.solrMetricsContext = parentContext;
     MetricsMap metricsMap = new MetricsMap((detailed, map) -> {
       if (detailed && !disableEntryList && !disableJmxEntryList) {
         UninvertingReader.FieldCacheStats fieldCacheStats = UninvertingReader.getUninvertedStats();
@@ -72,6 +72,6 @@ public class SolrFieldCacheBean implements SolrInfoBean, SolrMetricProducer {
         map.put("entries_count", UninvertingReader.getUninvertedStatsSize());
       }
     });
-    manager.registerGauge(this, registryName, metricsMap, tag, true, "fieldCache", Category.CACHE.toString(), scope);
+    solrMetricsContext.gauge(this, metricsMap, true, "fieldCache", Category.CACHE.toString(), scope);
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 7d33a19..4ad5904 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -35,7 +35,6 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 
-import com.codahale.metrics.MetricRegistry;
 import com.google.common.collect.Iterables;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.DirectoryReader;
@@ -69,6 +68,7 @@ import org.apache.solr.index.SlowCompositeReaderWrapper;
 import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestInfo;
@@ -140,8 +140,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
   private final StatsCache statsCache;
 
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
-  private SolrMetricManager metricManager;
-  private String registryName;
+  private SolrMetricsContext solrMetricsContext;
 
   private static DirectoryReader getReader(SolrCore core, SolrIndexConfig config, DirectoryFactory directoryFactory,
                                            String path) throws IOException {
@@ -431,12 +430,11 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
       cache.setState(SolrCache.State.LIVE);
       infoRegistry.put(cache.name(), cache);
     }
-    metricManager = core.getCoreContainer().getMetricManager();
-    registryName = core.getCoreMetricManager().getRegistryName();
+    this.solrMetricsContext = core.getSolrMetricsContext().getChildContext(this);
     for (SolrCache cache : cacheList) {
-      cache.initializeMetrics(metricManager, registryName, core.getMetricTag(), SolrMetricManager.mkName(cache.name(), STATISTICS_KEY));
+      cache.initializeMetrics(solrMetricsContext, SolrMetricManager.mkName(cache.name(), STATISTICS_KEY));
     }
-    initializeMetrics(metricManager, registryName, core.getMetricTag(), STATISTICS_KEY);
+    initializeMetrics(solrMetricsContext, STATISTICS_KEY);
     registerTime = new Date();
   }
 
@@ -479,13 +477,23 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
     }
 
     for (SolrCache cache : cacheList) {
-      cache.close();
+      try {
+        cache.close();
+      } catch (Exception e) {
+        SolrException.log(log, "Exception closing cache " + cache.name(), e);
+      }
     }
 
     if (releaseDirectory) {
       directoryFactory.release(getIndexReader().directory());
     }
 
+    try {
+      SolrMetricProducer.super.close();
+    } catch (Exception e) {
+      log.warn("Exception closing", e);
+    }
+
     // do this at the end so it only gets done if there are no exceptions
     numCloses.incrementAndGet();
     assert ObjectReleaseTracker.release(this);
@@ -2275,23 +2283,26 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
-    this.registryName = registry;
-    this.metricManager = manager;
-    manager.registerGauge(this, registry, () -> name, tag, true, "searcherName", Category.SEARCHER.toString(), scope);
-    manager.registerGauge(this, registry, () -> cachingEnabled, tag, true, "caching", Category.SEARCHER.toString(), scope);
-    manager.registerGauge(this, registry, () -> openTime, tag, true, "openedAt", Category.SEARCHER.toString(), scope);
-    manager.registerGauge(this, registry, () -> warmupTime, tag, true, "warmupTime", Category.SEARCHER.toString(), scope);
-    manager.registerGauge(this, registry, () -> registerTime, tag, true, "registeredAt", Category.SEARCHER.toString(), scope);
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
+  }
+
+  @Override
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    parentContext.gauge(this, () -> name, true, "searcherName", Category.SEARCHER.toString(), scope);
+    parentContext.gauge(this, () -> cachingEnabled, true, "caching", Category.SEARCHER.toString(), scope);
+    parentContext.gauge(this, () -> openTime, true, "openedAt", Category.SEARCHER.toString(), scope);
+    parentContext.gauge(this, () -> warmupTime, true, "warmupTime", Category.SEARCHER.toString(), scope);
+    parentContext.gauge(this, () -> registerTime, true, "registeredAt", Category.SEARCHER.toString(), scope);
     // reader stats
-    manager.registerGauge(this, registry, () -> reader.numDocs(), tag, true, "numDocs", Category.SEARCHER.toString(), scope);
-    manager.registerGauge(this, registry, () -> reader.maxDoc(), tag, true, "maxDoc", Category.SEARCHER.toString(), scope);
-    manager.registerGauge(this, registry, () -> reader.maxDoc() - reader.numDocs(), tag, true, "deletedDocs", Category.SEARCHER.toString(), scope);
-    manager.registerGauge(this, registry, () -> reader.toString(), tag, true, "reader", Category.SEARCHER.toString(), scope);
-    manager.registerGauge(this, registry, () -> reader.directory().toString(), tag, true, "readerDir", Category.SEARCHER.toString(), scope);
-    manager.registerGauge(this, registry, () -> reader.getVersion(), tag, true, "indexVersion", Category.SEARCHER.toString(), scope);
+    parentContext.gauge(this, () -> reader.numDocs(), true, "numDocs", Category.SEARCHER.toString(), scope);
+    parentContext.gauge(this, () -> reader.maxDoc(), true, "maxDoc", Category.SEARCHER.toString(), scope);
+    parentContext.gauge(this, () -> reader.maxDoc() - reader.numDocs(), true, "deletedDocs", Category.SEARCHER.toString(), scope);
+    parentContext.gauge(this, () -> reader.toString(), true, "reader", Category.SEARCHER.toString(), scope);
+    parentContext.gauge(this, () -> reader.directory().toString(), true, "readerDir", Category.SEARCHER.toString(), scope);
+    parentContext.gauge(this, () -> reader.getVersion(), true, "indexVersion", Category.SEARCHER.toString(), scope);
     // size of the currently opened commit
-    manager.registerGauge(this, registry, () -> {
+    parentContext.gauge(this, () -> {
       try {
         Collection<String> files = reader.getIndexCommit().getFileNames();
         long total = 0;
@@ -2302,19 +2313,13 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
       } catch (Exception e) {
         return -1;
       }
-    }, tag, true, "indexCommitSize", Category.SEARCHER.toString(), scope);
+    }, true, "indexCommitSize", Category.SEARCHER.toString(), scope);
     // statsCache metrics
-    manager.registerGauge(this, registry,
+    parentContext.gauge(this,
         new MetricsMap((detailed, map) -> {
           statsCache.getCacheMetrics().getSnapshot(map::put);
           map.put("statsCacheImpl", statsCache.getClass().getSimpleName());
-        }),
-        tag, true, "statsCache", Category.CACHE.toString(), scope);
-  }
-
-  @Override
-  public MetricRegistry getMetricRegistry() {
-    return core.getMetricRegistry();
+        }), true, "statsCache", Category.CACHE.toString(), scope);
   }
 
   private static class FilterImpl extends Filter {
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/GroupConverter.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/GroupConverter.java
index 0a21a62..3d27798 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/GroupConverter.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/GroupConverter.java
@@ -32,6 +32,7 @@ import org.apache.lucene.util.mutable.MutableValueDouble;
 import org.apache.lucene.util.mutable.MutableValueFloat;
 import org.apache.lucene.util.mutable.MutableValueInt;
 import org.apache.lucene.util.mutable.MutableValueLong;
+import org.apache.solr.common.util.Utils;
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.NumberType;
 import org.apache.solr.schema.SchemaField;
@@ -50,11 +51,11 @@ class GroupConverter {
     FieldType fieldType = field.getType();
     List<SearchGroup<BytesRef>> result = new ArrayList<>(values.size());
     for (SearchGroup<MutableValue> original : values) {
-      SearchGroup<BytesRef> converted = new SearchGroup<BytesRef>();
+      SearchGroup<BytesRef> converted = new SearchGroup<>();
       converted.sortValues = original.sortValues;
       if (original.groupValue.exists) {
         BytesRefBuilder binary = new BytesRefBuilder();
-        fieldType.readableToIndexed(original.groupValue.toString(), binary);
+        fieldType.readableToIndexed(Utils.OBJECT_TO_STRING.apply(original.groupValue.toObject()), binary);
         converted.groupValue = binary.get();
       } else {
         converted.groupValue = null;
@@ -68,7 +69,7 @@ class GroupConverter {
     FieldType fieldType = field.getType();
     List<SearchGroup<MutableValue>> result = new ArrayList<>(values.size());
     for (SearchGroup<BytesRef> original : values) {
-      SearchGroup<MutableValue> converted = new SearchGroup<MutableValue>();
+      SearchGroup<MutableValue> converted = new SearchGroup<>();
       converted.sortValues = original.sortValues; // ?
       NumberType type = fieldType.getNumberType();
       final MutableValue v;
@@ -147,14 +148,14 @@ class GroupConverter {
       final BytesRef groupValue;
       if (original.groupValue.exists) {
         BytesRefBuilder binary = new BytesRefBuilder();
-        fieldType.readableToIndexed(original.groupValue.toString(), binary);
+        fieldType.readableToIndexed(Utils.OBJECT_TO_STRING.apply(original.groupValue.toObject()), binary);
         groupValue = binary.get();
       } else {
         groupValue = null;
       }
-      groupDocs[i] = new GroupDocs<BytesRef>(original.score, original.maxScore, original.totalHits, original.scoreDocs, groupValue, original.groupSortValues);
+      groupDocs[i] = new GroupDocs<>(original.score, original.maxScore, original.totalHits, original.scoreDocs, groupValue, original.groupSortValues);
     }
     
-    return new TopGroups<BytesRef>(values.groupSort, values.withinGroupSort, values.totalHitCount, values.totalGroupedHitCount, groupDocs, values.maxScore);
+    return new TopGroups<>(values.groupSort, values.withinGroupSort, values.totalHitCount, values.totalGroupedHitCount, groupDocs, values.maxScore);
   }
 }
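
The change from original.groupValue.toString() to Utils.OBJECT_TO_STRING.apply(original.groupValue.toObject()) appears to exist because readableToIndexed needs the field's external (readable) representation, which toString() does not guarantee for every MutableValue subtype. A runnable illustration of the gap for a date-valued group key (my reading of the change, not code from the patch):

    import java.util.Date;

    import org.apache.lucene.util.mutable.MutableValueDate;

    public class MutableValueDemo {
      public static void main(String[] args) {
        MutableValueDate groupValue = new MutableValueDate();
        groupValue.value = 1572307200000L; // 2019-10-29T00:00:00Z as epoch millis
        groupValue.exists = true;

        // The old code handed this rendering to readableToIndexed; for a date
        // key it is not the ISO-8601 form a date field type parses.
        System.out.println(groupValue.toString());

        // toObject() preserves the typed value (a java.util.Date here), so a
        // type-aware formatter can produce the readable form:
        Date d = (Date) groupValue.toObject();
        System.out.println(d.toInstant()); // 2019-10-29T00:00:00Z
      }
    }
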
diff --git a/solr/core/src/java/org/apache/solr/security/AuditEvent.java b/solr/core/src/java/org/apache/solr/security/AuditEvent.java
index f9c45be..492384e 100644
--- a/solr/core/src/java/org/apache/solr/security/AuditEvent.java
+++ b/solr/core/src/java/org/apache/solr/security/AuditEvent.java
@@ -31,6 +31,7 @@ import java.util.stream.Collectors;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.servlet.SolrRequestParsers;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.MDC;
@@ -129,12 +130,15 @@ public class AuditEvent {
     this.solrPort = httpRequest.getLocalPort();
     this.solrIp = httpRequest.getLocalAddr();
     this.clientIp = httpRequest.getRemoteAddr();
-    this.resource = httpRequest.getContextPath();
+    this.resource = httpRequest.getPathInfo();
     this.httpMethod = httpRequest.getMethod();
     this.httpQueryString = httpRequest.getQueryString();
     this.headers = getHeadersFromRequest(httpRequest);
     this.requestUrl = httpRequest.getRequestURL();
     this.nodeName = MDC.get(ZkStateReader.NODE_NAME_PROP);
+    SolrRequestParsers.parseQueryString(httpQueryString).forEach(sp -> {
+      this.solrParams.put(sp.getKey(), Arrays.asList(sp.getValue()));
+    });
 
     setRequestType(findRequestType());
 
@@ -459,14 +463,14 @@ public class AuditEvent {
   }
   
   private static final List<String> ADMIN_PATH_REGEXES = Arrays.asList(
-      "^/solr/admin/.*",
-      "^/api/(c|collections)/$",
-      "^/api/(c|collections)/[^/]+/config$",
-      "^/api/(c|collections)/[^/]+/schema$",
-      "^/api/(c|collections)/[^/]+/shards.*",
-      "^/api/cores.*$",
-      "^/api/node$",
-      "^/api/cluster$");
+      "^/admin/.*",
+      "^/(____v2|api)/(c|collections)$",
+      "^/(____v2|api)/(c|collections)/[^/]+/config$",
+      "^/(____v2|api)/(c|collections)/[^/]+/schema$",
+      "^/(____v2|api)/(c|collections)/[^/]+/shards.*",
+      "^/(____v2|api)/cores.*$",
+      "^/(____v2|api)/node$",
+      "^/(____v2|api)/cluster$");
 
   private static final List<String> STREAMING_PATH_REGEXES = Collections.singletonList(".*/stream.*");
 
diff --git a/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java b/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java
index a6c364a..d5ff666 100644
--- a/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java
@@ -36,7 +36,6 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Meter;
-import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
 import com.fasterxml.jackson.annotation.JsonInclude.Include;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -45,8 +44,8 @@ import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.apache.solr.core.SolrInfoBean;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.security.AuditEvent.EventType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -75,14 +74,12 @@ public abstract class AuditLoggerPlugin implements Closeable, Runnable, SolrInfo
   int blockingQueueSize;
 
   protected AuditEventFormatter formatter;
-  private MetricRegistry registry;
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
   private ExecutorService executorService;
   private boolean closed;
   private MuteRules muteRules;
-  
-  protected String registryName;
-  protected SolrMetricManager metricManager;
+
+  protected SolrMetricsContext solrMetricsContext;
   protected Meter numErrors = new Meter();
   protected Meter numLost = new Meter();
   protected Meter numLogged = new Meter();
@@ -239,24 +236,21 @@ public abstract class AuditLoggerPlugin implements Closeable, Runnable, SolrInfo
   }
   
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, final String scope) {
+  public void initializeMetrics(SolrMetricsContext parentContext, final String scope) {
+    solrMetricsContext = parentContext.getChildContext(this);
     String className = this.getClass().getSimpleName();
     log.debug("Initializing metrics for {}", className);
-    this.metricManager = manager;
-    this.registryName = registryName;
-    // Metrics
-    registry = manager.registry(registryName);
-    numErrors = manager.meter(this, registryName, "errors", getCategory().toString(), scope, className);
-    numLost = manager.meter(this, registryName, "lost", getCategory().toString(), scope, className);
-    numLogged = manager.meter(this, registryName, "count", getCategory().toString(), scope, className);
-    requestTimes = manager.timer(this, registryName, "requestTimes", getCategory().toString(), scope, className);
-    totalTime = manager.counter(this, registryName, "totalTime", getCategory().toString(), scope, className);
+    numErrors = solrMetricsContext.meter(this, "errors", getCategory().toString(), scope, className);
+    numLost = solrMetricsContext.meter(this, "lost", getCategory().toString(), scope, className);
+    numLogged = solrMetricsContext.meter(this, "count", getCategory().toString(), scope, className);
+    requestTimes = solrMetricsContext.timer(this, "requestTimes", getCategory().toString(), scope, className);
+    totalTime = solrMetricsContext.counter(this, "totalTime", getCategory().toString(), scope, className);
     if (async) {
-      manager.registerGauge(this, registryName, () -> blockingQueueSize, "queueCapacity", true, "queueCapacity", getCategory().toString(), scope, className);
-      manager.registerGauge(this, registryName, () -> blockingQueueSize - queue.remainingCapacity(), "queueSize", true, "queueSize", getCategory().toString(), scope, className);
-      queuedTime = manager.timer(this, registryName, "queuedTime", getCategory().toString(), scope, className);
+      solrMetricsContext.gauge(this, () -> blockingQueueSize, true, "queueCapacity", getCategory().toString(), scope, className);
+      solrMetricsContext.gauge(this, () -> blockingQueueSize - queue.remainingCapacity(), true, "queueSize", getCategory().toString(), scope, className);
+      queuedTime = solrMetricsContext.timer(this, "queuedTime", getCategory().toString(), scope, className);
     }
-    manager.registerGauge(this, registryName, () -> async, "async", true, "async", getCategory().toString(), scope, className);
+    solrMetricsContext.gauge(this, () -> async, true, "async", getCategory().toString(), scope, className);
   }
   
   @Override
@@ -280,10 +274,10 @@ public abstract class AuditLoggerPlugin implements Closeable, Runnable, SolrInfo
   }
 
   @Override
-  public MetricRegistry getMetricRegistry() {
-    return registry;
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
   }
-  
+
   /**
    * Interface for formatting the event
    */
@@ -325,6 +319,11 @@ public abstract class AuditLoggerPlugin implements Closeable, Runnable, SolrInfo
       closed = true;
       log.info("Shutting down async Auditlogger background thread(s)");
       executorService.shutdownNow();
+      try {
+        SolrMetricProducer.super.close();
+      } catch (Exception e) {
+        throw new IOException("Exception closing", e);
+      }
     }
   }
 
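
Note that the two queue gauges in the async branch are derived rather than counted: capacity is the configured bound, and the fill level is that bound minus BlockingQueue.remainingCapacity(). A standalone illustration of the arithmetic:

    import java.util.concurrent.ArrayBlockingQueue;
    import java.util.concurrent.BlockingQueue;

    public class QueueGaugeDemo {
      public static void main(String[] args) {
        int blockingQueueSize = 4;
        BlockingQueue<String> queue = new ArrayBlockingQueue<>(blockingQueueSize);
        queue.add("audit event");
        // What the queueCapacity and queueSize gauges would report:
        System.out.println("queueCapacity = " + blockingQueueSize);  // 4
        System.out.println("queueSize = "
            + (blockingQueueSize - queue.remainingCapacity()));      // 1
      }
    }
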
diff --git a/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java b/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java
index 5fd18a1..320f661 100644
--- a/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java
@@ -19,39 +19,33 @@ package org.apache.solr.security;
 import javax.servlet.FilterChain;
 import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
-import java.io.Closeable;
-import java.util.Arrays;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Meter;
-import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
-import org.apache.solr.core.SolrInfoBean;
-import org.apache.solr.metrics.SolrMetricManager;
-import org.apache.solr.metrics.SolrMetricProducer;
-
 import org.apache.http.HttpRequest;
 import org.apache.http.protocol.HttpContext;
+import org.apache.solr.core.SolrInfoBean;
+import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.eclipse.jetty.client.api.Request;
 
 /**
  * 
  * @lucene.experimental
  */
-public abstract class AuthenticationPlugin implements Closeable, SolrInfoBean, SolrMetricProducer {
+public abstract class AuthenticationPlugin implements SolrInfoBean, SolrMetricProducer {
 
   final public static String AUTHENTICATION_PLUGIN_PROP = "authenticationPlugin";
   final public static String HTTP_HEADER_X_SOLR_AUTHDATA = "X-Solr-AuthData";
 
   // Metrics
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
-  private MetricRegistry registry;
+  protected SolrMetricsContext solrMetricsContext;
 
-  protected String registryName;
-  protected SolrMetricManager metricManager;
   protected Meter numErrors = new Meter();
   protected Counter requests = new Counter();
   protected Timer requestTimes = new Timer();
@@ -66,7 +60,7 @@ public abstract class AuthenticationPlugin implements Closeable, SolrInfoBean, S
    * @param pluginConfig Config parameters, possibly from a ZK source
    */
   public abstract void init(Map<String, Object> pluginConfig);
- 
+
   /**
    * This method attempts to authenticate the request. Upon a successful authentication, this
    * must call the next filter in the filter chain and set the user principal of the request,
@@ -107,10 +101,10 @@ public abstract class AuthenticationPlugin implements Closeable, SolrInfoBean, S
    * delegate to {@link PKIAuthenticationPlugin}. Return true to indicate that your plugin
    * did handle the request, or false to signal that PKI plugin should handle it. This method
    * will be called by {@link PKIAuthenticationPlugin}'s interceptor.
-   * 
+   *
    * <p>
    *   If not overridden, this method will return true for plugins implementing {@link HttpClientBuilderPlugin}.
-   *   This method can be overridden by subclasses e.g. to set HTTP headers, even if you don't use a clientBuilder. 
+   *   This method can be overridden by subclasses e.g. to set HTTP headers, even if you don't use a clientBuilder.
    * </p>
    * @param httpRequest the httpRequest that is about to be sent to another internal Solr node
    * @param httpContext the context of that request.
@@ -137,7 +131,7 @@ public abstract class AuthenticationPlugin implements Closeable, SolrInfoBean, S
   protected boolean interceptInternodeRequest(Request request) {
     return this instanceof HttpClientBuilderPlugin;
   }
-  
+
   /**
    * Cleanup any per request  data
    */
@@ -145,23 +139,24 @@ public abstract class AuthenticationPlugin implements Closeable, SolrInfoBean, S
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, final String scope) {
-    this.metricManager = manager;
-    this.registryName = registryName;
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
+  }
+
+  @Override
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    this.solrMetricsContext = parentContext.getChildContext(this);
     // Metrics
-    registry = manager.registry(registryName);
-    numErrors = manager.meter(this, registryName, "errors", getCategory().toString(), scope);
-    requests = manager.counter(this, registryName, "requests", getCategory().toString(), scope);
-    numAuthenticated = manager.counter(this, registryName, "authenticated", getCategory().toString(), scope);
-    numPassThrough = manager.counter(this, registryName, "passThrough", getCategory().toString(), scope);
-    numWrongCredentials = manager.counter(this, registryName, "failWrongCredentials", getCategory().toString(), scope);
-    numMissingCredentials = manager.counter(this, registryName, "failMissingCredentials", getCategory().toString(), scope);
-    requestTimes = manager.timer(this, registryName, "requestTimes", getCategory().toString(), scope);
-    totalTime = manager.counter(this, registryName, "totalTime", getCategory().toString(), scope);
-    metricNames.addAll(Arrays.asList("errors", "requests", "authenticated", "passThrough",
-        "failWrongCredentials", "failMissingCredentials", "requestTimes", "totalTime"));
+    numErrors = this.solrMetricsContext.meter(this, "errors", getCategory().toString(), scope);
+    requests = this.solrMetricsContext.counter(this, "requests", getCategory().toString(), scope);
+    numAuthenticated = this.solrMetricsContext.counter(this, "authenticated", getCategory().toString(), scope);
+    numPassThrough = this.solrMetricsContext.counter(this, "passThrough", getCategory().toString(), scope);
+    numWrongCredentials = this.solrMetricsContext.counter(this, "failWrongCredentials", getCategory().toString(), scope);
+    numMissingCredentials = this.solrMetricsContext.counter(this, "failMissingCredentials", getCategory().toString(), scope);
+    requestTimes = this.solrMetricsContext.timer(this, "requestTimes", getCategory().toString(), scope);
+    totalTime = this.solrMetricsContext.counter(this, "totalTime", getCategory().toString(), scope);
   }
-  
+
   @Override
   public String getName() {
     return this.getClass().getName();
@@ -181,10 +176,4 @@ public abstract class AuthenticationPlugin implements Closeable, SolrInfoBean, S
   public Set<String> getMetricNames() {
     return metricNames;
   }
-
-  @Override
-  public MetricRegistry getMetricRegistry() {
-    return registry;
-  }
-  
 }
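
The Meter/Counter/Timer fields initialized above are typically driven from the authentication path itself; a sketch of the usual update pattern around the actual work, using the Dropwizard types directly (illustrative, not taken from this patch):

    import com.codahale.metrics.Counter;
    import com.codahale.metrics.Timer;

    public class AuthMetricsDemo {
      static final Timer requestTimes = new Timer();
      static final Counter requests = new Counter();
      static final Counter totalTime = new Counter();

      public static void main(String[] args) throws Exception {
        requests.inc();
        Timer.Context timer = requestTimes.time();
        try {
          Thread.sleep(5); // stand-in for the real authentication work
        } finally {
          totalTime.inc(timer.stop()); // stop() returns the elapsed nanoseconds
        }
        System.out.println("requests=" + requests.getCount()
            + " totalTimeNs=" + totalTime.getCount());
      }
    }
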
diff --git a/solr/core/src/java/org/apache/solr/security/MultiDestinationAuditLogger.java b/solr/core/src/java/org/apache/solr/security/MultiDestinationAuditLogger.java
index 7ad6b5b..1e2e33e 100644
--- a/solr/core/src/java/org/apache/solr/security/MultiDestinationAuditLogger.java
+++ b/solr/core/src/java/org/apache/solr/security/MultiDestinationAuditLogger.java
@@ -26,7 +26,7 @@ import java.util.stream.Collectors;
 import org.apache.lucene.analysis.util.ResourceLoader;
 import org.apache.lucene.analysis.util.ResourceLoaderAware;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -118,10 +118,10 @@
     this.loader = loader;
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    super.initializeMetrics(manager, registryName, tag, scope);
-    plugins.forEach(p -> p.initializeMetrics(manager, registryName, tag, scope));
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    super.initializeMetrics(parentContext, scope);
+    plugins.forEach(p -> p.initializeMetrics(solrMetricsContext, scope));
   }
 
   @Override
diff --git a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
index 5e56b73..e7002f3 100644
--- a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
+++ b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
@@ -474,31 +474,45 @@ public class HttpSolrCall {
     AuthorizationContext context = getAuthCtx();
     log.debug("AuthorizationContext : {}", context);
     AuthorizationResponse authResponse = cores.getAuthorizationPlugin().authorize(context);
-    if (authResponse.statusCode == AuthorizationResponse.PROMPT.statusCode) {
+    int statusCode = authResponse.statusCode;
+    
+    if (statusCode == AuthorizationResponse.PROMPT.statusCode) {
       Map<String, String> headers = (Map) getReq().getAttribute(AuthenticationPlugin.class.getName());
       if (headers != null) {
         for (Map.Entry<String, String> e : headers.entrySet()) response.setHeader(e.getKey(), e.getValue());
       }
       log.debug("USER_REQUIRED "+req.getHeader("Authorization")+" "+ req.getUserPrincipal());
+      sendError(statusCode,
+          "Authentication failed, Response code: " + statusCode);
       if (shouldAudit(EventType.REJECTED)) {
         cores.getAuditLoggerPlugin().doAudit(new AuditEvent(EventType.REJECTED, req, context));
       }
+      return RETURN;
     }
-    if (!(authResponse.statusCode == HttpStatus.SC_ACCEPTED) && !(authResponse.statusCode == HttpStatus.SC_OK)) {
-      log.info("USER_REQUIRED auth header {} context : {} ", req.getHeader("Authorization"), context);
-      sendError(authResponse.statusCode,
-          "Unauthorized request, Response code: " + authResponse.statusCode);
+    if (statusCode == AuthorizationResponse.FORBIDDEN.statusCode) {
+      log.debug("UNAUTHORIZED auth header {} context : {}, msg: {}", req.getHeader("Authorization"), context, authResponse.getMessage());
+      sendError(statusCode,
+          "Unauthorized request, Response code: " + statusCode);
       if (shouldAudit(EventType.UNAUTHORIZED)) {
         cores.getAuditLoggerPlugin().doAudit(new AuditEvent(EventType.UNAUTHORIZED, req, context));
       }
       return RETURN;
     }
+    if (statusCode != HttpStatus.SC_ACCEPTED && statusCode != HttpStatus.SC_OK) {
+      log.warn("ERROR {} during authorization: {}", statusCode, authResponse.getMessage());
+      sendError(statusCode,
+          "ERROR during authorization, Response code: " + statusCode);
+      if (shouldAudit(EventType.ERROR)) {
+        cores.getAuditLoggerPlugin().doAudit(new AuditEvent(EventType.ERROR, req, context));
+      }
+      return RETURN;
+    }
     if (shouldAudit(EventType.AUTHORIZED)) {
       cores.getAuditLoggerPlugin().doAudit(new AuditEvent(EventType.AUTHORIZED, req, context));
     }
     return null;
   }
-  
+
   /**
    * This method processes the request.
    */
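
The rewritten authorize path distinguishes three failure modes instead of two: PROMPT (authentication required), FORBIDDEN (authenticated but not allowed), and any other non-2xx status from the authorization plugin, each with its own audit event. A condensed, runnable restatement of the decision table (the values 401/403/200/202 are assumed from AuthorizationResponse.PROMPT, AuthorizationResponse.FORBIDDEN and HttpStatus):

    public class AuthzOutcomeDemo {
      enum EventType { REJECTED, UNAUTHORIZED, ERROR, AUTHORIZED }

      static EventType outcome(int statusCode) {
        if (statusCode == 401) return EventType.REJECTED;     // PROMPT: credentials required
        if (statusCode == 403) return EventType.UNAUTHORIZED; // FORBIDDEN: not allowed
        if (statusCode != 200 && statusCode != 202) {
          return EventType.ERROR;                             // authorization plugin error
        }
        return EventType.AUTHORIZED;                          // continue normal handling
      }

      public static void main(String[] args) {
        for (int sc : new int[] {401, 403, 500, 200}) {
          System.out.println(sc + " -> " + outcome(sc));
        }
      }
    }
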
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index 90d6b17..3c3e26f 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -77,6 +77,7 @@ import org.apache.solr.metrics.AltBufferPoolMetricSet;
 import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.metrics.OperatingSystemMetricSet;
 import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
 import org.apache.solr.security.AuditEvent;
 import org.apache.solr.security.AuthenticationPlugin;
 import org.apache.solr.security.PKIAuthenticationPlugin;
@@ -108,7 +109,7 @@ public class SolrDispatchFilter extends BaseSolrFilter {
   
   private boolean isV2Enabled = !"true".equals(System.getProperty("disable.v2.api", "false"));
 
-  private final String metricTag = Integer.toHexString(hashCode());
+  private final String metricTag = SolrMetricProducer.getUniqueMetricTag(this, null);
   private SolrMetricManager metricManager;
   private String registryName;
   private volatile boolean closeOnDestroy = true;
diff --git a/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java b/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
index 6d9e9ea..d7654b6 100644
--- a/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
+++ b/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
@@ -20,11 +20,10 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicLong;
 
-import com.codahale.metrics.MetricRegistry;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.search.SolrCacheBase;
 
 /**
@@ -54,17 +53,13 @@ public class Metrics extends SolrCacheBase implements SolrInfoBean, SolrMetricPr
   public AtomicLong shardBuffercacheLost = new AtomicLong(0);
 
   private MetricsMap metricsMap;
-  private MetricRegistry registry;
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
-  private SolrMetricManager metricManager;
-  private String registryName;
+  private SolrMetricsContext solrMetricsContext;
   private long previous = System.nanoTime();
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    this.metricManager = manager;
-    this.registryName = registryName;
-    registry = manager.registry(registryName);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    solrMetricsContext = parentContext.getChildContext(this);
     metricsMap = new MetricsMap((detailed, map) -> {
       long now = System.nanoTime();
       long delta = Math.max(now - previous, 1);
@@ -108,7 +103,7 @@ public class Metrics extends SolrCacheBase implements SolrInfoBean, SolrMetricPr
       previous = now;
 
     });
-    manager.registerGauge(this, registryName, metricsMap, tag, true, getName(), getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, metricsMap, true, getName(), getCategory().toString(), scope);
   }
 
   private float getPerSecond(long value, double seconds) {
@@ -133,8 +128,7 @@ public class Metrics extends SolrCacheBase implements SolrInfoBean, SolrMetricPr
   }
 
   @Override
-  public MetricRegistry getMetricRegistry() {
-    return registry;
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
   }
-
 }
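
The MetricsMap lambda computes rates lazily: each snapshot divides the accumulated counters by the wall-clock time since the previous snapshot. The core arithmetic, extracted into a runnable sketch with simplified names:

    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicLong;

    public class PerSecondDemo {
      static final AtomicLong blockCacheHit = new AtomicLong();
      static long previous = System.nanoTime();

      static float getPerSecond(long value, double seconds) {
        return (float) (value / seconds);
      }

      public static void main(String[] args) throws Exception {
        blockCacheHit.addAndGet(500);
        Thread.sleep(100);
        long now = System.nanoTime();
        long delta = Math.max(now - previous, 1); // guard against divide-by-zero
        double seconds = delta / (double) TimeUnit.SECONDS.toNanos(1);
        System.out.println("hitsPerSecond = "
            + getPerSecond(blockCacheHit.getAndSet(0), seconds));
        previous = now;
      }
    }
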
diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
index 2bf60cb..0739ede 100644
--- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
+++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
@@ -32,8 +32,8 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -51,9 +51,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
   private final ConcurrentMap<HdfsDirectory,ConcurrentMap<FileStatus,BlockLocation[]>> cache;
 
   private final Set<String> metricNames = ConcurrentHashMap.newKeySet();
-  private MetricRegistry registry;
-  private SolrMetricManager metricManager;
-  private String registryName;
+  private SolrMetricsContext solrMetricsContext;
 
   public HdfsLocalityReporter() {
     cache = new ConcurrentHashMap<>();
@@ -89,17 +87,20 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
 
   @Override
   public MetricRegistry getMetricRegistry() {
-    return registry;
+    return solrMetricsContext != null ? solrMetricsContext.getMetricRegistry() : null;
+  }
+
+  @Override
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
   }
 
   /**
    * Provide statistics on HDFS block locality, both in terms of bytes and block counts.
    */
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    this.metricManager = manager;
-    this.registryName = registryName;
-    registry = manager.registry(registryName);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    solrMetricsContext = parentContext.getChildContext(this);
     MetricsMap metricsMap = new MetricsMap((detailed, map) -> {
       long totalBytes = 0;
       long localBytes = 0;
@@ -149,7 +150,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
         map.put(LOCALITY_BLOCKS_RATIO, localCount / (double) totalCount);
       }
     });
-    manager.registerGauge(this, registryName, metricsMap, tag, true, "hdfsLocality", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, metricsMap, true, "hdfsLocality", getCategory().toString(), scope);
   }
 
   /**
diff --git a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
index 2b621a8..d9703a2 100644
--- a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
+++ b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
@@ -51,8 +51,8 @@ import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrConfig.UpdateHandlerInfo;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestInfo;
@@ -96,8 +96,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
   LongAdder numDocsPending = new LongAdder();
   LongAdder numErrors = new LongAdder();
   Meter numErrorsCumulative;
-  SolrMetricManager metricManager;
-  String registryName;
+  SolrMetricsContext solrMetricsContext;
 
   // tracks when auto-commit should occur
   protected final CommitTracker commitTracker;
@@ -170,48 +169,46 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    this.metricManager = manager;
-    this.registryName = registryName;
-    this.registry = manager.registry(registryName);
-    commitCommands = manager.meter(this, registryName, "commits", getCategory().toString(), scope);
-    manager.registerGauge(this, registryName, () -> commitTracker.getCommitCount(), tag, true, "autoCommits", getCategory().toString(), scope);
-    manager.registerGauge(this, registryName, () -> softCommitTracker.getCommitCount(), tag, true, "softAutoCommits", getCategory().toString(), scope);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    solrMetricsContext = parentContext.getChildContext(this);
+    commitCommands = solrMetricsContext.meter(this, "commits", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> commitTracker.getCommitCount(), true, "autoCommits", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> softCommitTracker.getCommitCount(), true, "softAutoCommits", getCategory().toString(), scope);
     if (commitTracker.getDocsUpperBound() > 0) {
-      manager.registerGauge(this, registryName, () -> commitTracker.getDocsUpperBound(), tag, true, "autoCommitMaxDocs",
+      solrMetricsContext.gauge(this, () -> commitTracker.getDocsUpperBound(), true, "autoCommitMaxDocs",
           getCategory().toString(), scope);
     }
     if (commitTracker.getTimeUpperBound() > 0) {
-      manager.registerGauge(this, registryName, () -> "" + commitTracker.getTimeUpperBound() + "ms", tag, true, "autoCommitMaxTime",
+      solrMetricsContext.gauge(this, () -> "" + commitTracker.getTimeUpperBound() + "ms", true, "autoCommitMaxTime",
           getCategory().toString(), scope);
     }
     if (commitTracker.getTLogFileSizeUpperBound() > 0) {
-      manager.registerGauge(this, registryName, () -> commitTracker.getTLogFileSizeUpperBound(), tag, true, "autoCommitMaxSize",
+      solrMetricsContext.gauge(this, () -> commitTracker.getTLogFileSizeUpperBound(), true, "autoCommitMaxSize",
           getCategory().toString(), scope);
     }
     if (softCommitTracker.getDocsUpperBound() > 0) {
-      manager.registerGauge(this, registryName, () -> softCommitTracker.getDocsUpperBound(), tag, true, "softAutoCommitMaxDocs",
+      solrMetricsContext.gauge(this, () -> softCommitTracker.getDocsUpperBound(), true, "softAutoCommitMaxDocs",
           getCategory().toString(), scope);
     }
     if (softCommitTracker.getTimeUpperBound() > 0) {
-      manager.registerGauge(this, registryName, () -> "" + softCommitTracker.getTimeUpperBound() + "ms", tag, true, "softAutoCommitMaxTime",
+      solrMetricsContext.gauge(this, () -> "" + softCommitTracker.getTimeUpperBound() + "ms", true, "softAutoCommitMaxTime",
           getCategory().toString(), scope);
     }
-    optimizeCommands = manager.meter(this, registryName, "optimizes", getCategory().toString(), scope);
-    rollbackCommands = manager.meter(this, registryName, "rollbacks", getCategory().toString(), scope);
-    splitCommands = manager.meter(this, registryName, "splits", getCategory().toString(), scope);
-    mergeIndexesCommands = manager.meter(this, registryName, "merges", getCategory().toString(), scope);
-    expungeDeleteCommands = manager.meter(this, registryName, "expungeDeletes", getCategory().toString(), scope);
-    manager.registerGauge(this, registryName, () -> numDocsPending.longValue(), tag, true, "docsPending", getCategory().toString(), scope);
-    manager.registerGauge(this, registryName, () -> addCommands.longValue(), tag, true, "adds", getCategory().toString(), scope);
-    manager.registerGauge(this, registryName, () -> deleteByIdCommands.longValue(), tag, true, "deletesById", getCategory().toString(), scope);
-    manager.registerGauge(this, registryName, () -> deleteByQueryCommands.longValue(), tag, true, "deletesByQuery", getCategory().toString(), scope);
-    manager.registerGauge(this, registryName, () -> numErrors.longValue(), tag, true, "errors", getCategory().toString(), scope);
+    optimizeCommands = solrMetricsContext.meter(this, "optimizes", getCategory().toString(), scope);
+    rollbackCommands = solrMetricsContext.meter(this, "rollbacks", getCategory().toString(), scope);
+    splitCommands = solrMetricsContext.meter(this, "splits", getCategory().toString(), scope);
+    mergeIndexesCommands = solrMetricsContext.meter(this, "merges", getCategory().toString(), scope);
+    expungeDeleteCommands = solrMetricsContext.meter(this, "expungeDeletes", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> numDocsPending.longValue(), true, "docsPending", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> addCommands.longValue(), true, "adds", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> deleteByIdCommands.longValue(), true, "deletesById", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> deleteByQueryCommands.longValue(), true, "deletesByQuery", getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, () -> numErrors.longValue(), true, "errors", getCategory().toString(), scope);
 
-    addCommandsCumulative = manager.meter(this, registryName, "cumulativeAdds", getCategory().toString(), scope);
-    deleteByIdCommandsCumulative = manager.meter(this, registryName, "cumulativeDeletesById", getCategory().toString(), scope);
-    deleteByQueryCommandsCumulative = manager.meter(this, registryName, "cumulativeDeletesByQuery", getCategory().toString(), scope);
-    numErrorsCumulative = manager.meter(this, registryName, "cumulativeErrors", getCategory().toString(), scope);
+    addCommandsCumulative = solrMetricsContext.meter(this, "cumulativeAdds", getCategory().toString(), scope);
+    deleteByIdCommandsCumulative = solrMetricsContext.meter(this, "cumulativeDeletesById", getCategory().toString(), scope);
+    deleteByQueryCommandsCumulative = solrMetricsContext.meter(this, "cumulativeDeletesByQuery", getCategory().toString(), scope);
+    numErrorsCumulative = solrMetricsContext.meter(this, "cumulativeErrors", getCategory().toString(), scope);
   }
 
   private void deleteAll() throws IOException {
@@ -805,6 +802,11 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
     softCommitTracker.close();
 
     numDocsPending.reset();
+    try {
+      SolrMetricProducer.super.close();
+    } catch (Exception e) {
+      throw new IOException("Error closing", e);
+    }
   }
 
 
@@ -915,7 +917,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
   }
 
   /**
-   * Calls either {@link IndexWriter#updateDocValues} or {@link IndexWriter#updateDocument}(s) as
+   * Calls either {@link IndexWriter#updateDocValues} or <code>IndexWriter#updateDocument</code>(s) as
    * needed based on {@link AddUpdateCommand#isInPlaceUpdate}.
    * <p>
    * If the this is an UPDATE_INPLACE cmd, then all fields included in 
@@ -1012,5 +1014,10 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
   public CommitTracker getSoftCommitTracker() {
     return softCommitTracker;
   }
+  
+  @Override
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
+  }
 
 }
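
Two details of the DirectUpdateHandler2 migration are easy to miss: tracker-bound gauges are only registered when the bound is actually configured (> 0), and time bounds are exposed as preformatted strings such as "15000ms" rather than raw numbers. A small standalone sketch of that conditional, string-valued gauge supplier:

    import java.util.function.Supplier;

    public class GaugeSupplierDemo {
      public static void main(String[] args) {
        long timeUpperBound = 15000; // stand-in for commitTracker.getTimeUpperBound()
        if (timeUpperBound > 0) {    // gauge registered only when a bound is set
          Supplier<String> autoCommitMaxTime = () -> "" + timeUpperBound + "ms";
          System.out.println("autoCommitMaxTime = " + autoCommitMaxTime.get());
        }
      }
    }
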
diff --git a/solr/core/src/java/org/apache/solr/update/PeerSync.java b/solr/core/src/java/org/apache/solr/update/PeerSync.java
index 8fd3bef..7b7409b 100644
--- a/solr/core/src/java/org/apache/solr/update/PeerSync.java
+++ b/solr/core/src/java/org/apache/solr/update/PeerSync.java
@@ -49,8 +49,8 @@ import org.apache.solr.handler.component.ShardHandler;
 import org.apache.solr.handler.component.ShardRequest;
 import org.apache.solr.handler.component.ShardResponse;
 import org.apache.solr.logging.MDCLoggingContext;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
@@ -133,10 +133,10 @@ public class PeerSync implements SolrMetricProducer {
   public static final String METRIC_SCOPE = "peerSync";
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
-    syncTime = manager.timer(null, registry, "time", scope, METRIC_SCOPE);
-    syncErrors = manager.counter(null, registry, "errors", scope, METRIC_SCOPE);
-    syncSkipped = manager.counter(null, registry, "skipped", scope, METRIC_SCOPE);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    syncTime = parentContext.timer(null, "time", scope, METRIC_SCOPE);
+    syncErrors = parentContext.counter(null, "errors", scope, METRIC_SCOPE);
+    syncSkipped = parentContext.counter(null, "skipped", scope, METRIC_SCOPE);
   }
 
   public static long percentile(List<Long> arr, float frac) {
diff --git a/solr/core/src/java/org/apache/solr/update/PeerSyncWithLeader.java b/solr/core/src/java/org/apache/solr/update/PeerSyncWithLeader.java
index ae58662..ebe41d6 100644
--- a/solr/core/src/java/org/apache/solr/update/PeerSyncWithLeader.java
+++ b/solr/core/src/java/org/apache/solr/update/PeerSyncWithLeader.java
@@ -38,8 +38,8 @@ import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.logging.MDCLoggingContext;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -90,10 +90,10 @@ public class PeerSyncWithLeader implements SolrMetricProducer {
   public static final String METRIC_SCOPE = "peerSync";
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
-    syncTime = manager.timer(null, registry, "time", scope, METRIC_SCOPE);
-    syncErrors = manager.counter(null, registry, "errors", scope, METRIC_SCOPE);
-    syncSkipped = manager.counter(null, registry, "skipped", scope, METRIC_SCOPE);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    syncTime = parentContext.timer(null, "time", scope, METRIC_SCOPE);
+    syncErrors = parentContext.counter(null, "errors", scope, METRIC_SCOPE);
+    syncSkipped = parentContext.counter(null, "skipped", scope, METRIC_SCOPE);
   }
 
   // start of peersync related debug messages.  includes the core name for correlation.
diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
index 538a983..55597b1 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
@@ -42,7 +42,7 @@ import org.apache.solr.core.DirectoryFactory;
 import org.apache.solr.core.DirectoryFactory.DirContext;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoBean;
-import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.schema.IndexSchema;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -88,8 +88,7 @@ public class SolrIndexWriter extends IndexWriter {
   private final AtomicLong runningMajorMergesDocs = new AtomicLong();
   private final AtomicLong runningMinorMergesDocs = new AtomicLong();
 
-  private final SolrMetricManager metricManager;
-  private final String registryName;
+  private final SolrMetricsContext solrMetricsContext;
   // merge diagnostics.
   private final Map<String, Long> runningMerges = new ConcurrentHashMap<>();
 
@@ -120,8 +119,7 @@ public class SolrIndexWriter extends IndexWriter {
     // no metrics
     mergeTotals = false;
     mergeDetails = false;
-    metricManager = null;
-    registryName = null;
+    solrMetricsContext = null;
   }
 
   private SolrIndexWriter(SolrCore core, String name, String path, Directory directory, boolean create, IndexSchema schema, SolrIndexConfig config, IndexDeletionPolicy delPolicy, Codec codec) throws IOException {
@@ -135,8 +133,7 @@ public class SolrIndexWriter extends IndexWriter {
     infoStream = getConfig().getInfoStream();
     this.directory = directory;
     numOpens.incrementAndGet();
-    metricManager = core.getCoreContainer().getMetricManager();
-    registryName = core.getCoreMetricManager().getRegistryName();
+    solrMetricsContext = core.getSolrMetricsContext().getChildContext(this);
     if (config.metricsInfo != null && config.metricsInfo.initArgs != null) {
       Object v = config.metricsInfo.initArgs.get("majorMergeDocs");
       if (v != null) {
@@ -160,21 +157,20 @@
       }
       if (mergeDetails) {
         mergeTotals = true; // override
-        majorMergedDocs = metricManager.meter(null, registryName, "docs", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
-        majorDeletedDocs = metricManager.meter(null, registryName, "deletedDocs", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
+        majorMergedDocs = solrMetricsContext.meter(null, "docs", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
+        majorDeletedDocs = solrMetricsContext.meter(null, "deletedDocs", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
       }
       if (mergeTotals) {
-        minorMerge = metricManager.timer(null, registryName, "minor", SolrInfoBean.Category.INDEX.toString(), "merge");
-        majorMerge = metricManager.timer(null, registryName, "major", SolrInfoBean.Category.INDEX.toString(), "merge");
-        mergeErrors = metricManager.counter(null, registryName, "errors", SolrInfoBean.Category.INDEX.toString(), "merge");
+        minorMerge = solrMetricsContext.timer(null, "minor", SolrInfoBean.Category.INDEX.toString(), "merge");
+        majorMerge = solrMetricsContext.timer(null, "major", SolrInfoBean.Category.INDEX.toString(), "merge");
+        mergeErrors = solrMetricsContext.counter(null, "errors", SolrInfoBean.Category.INDEX.toString(), "merge");
-        String tag = core.getMetricTag();
-        metricManager.registerGauge(null, registryName, () -> runningMajorMerges.get(), tag, true, "running", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
-        metricManager.registerGauge(null, registryName, () -> runningMinorMerges.get(), tag, true, "running", SolrInfoBean.Category.INDEX.toString(), "merge", "minor");
-        metricManager.registerGauge(null, registryName, () -> runningMajorMergesDocs.get(), tag, true, "running.docs", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
-        metricManager.registerGauge(null, registryName, () -> runningMinorMergesDocs.get(), tag, true, "running.docs", SolrInfoBean.Category.INDEX.toString(), "merge", "minor");
-        metricManager.registerGauge(null, registryName, () -> runningMajorMergesSegments.get(), tag, true, "running.segments", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
-        metricManager.registerGauge(null, registryName, () -> runningMinorMergesSegments.get(), tag, true, "running.segments", SolrInfoBean.Category.INDEX.toString(), "merge", "minor");
-        flushMeter = metricManager.meter(null, registryName, "flush", SolrInfoBean.Category.INDEX.toString());
+        solrMetricsContext.gauge(null, () -> runningMajorMerges.get(), true, "running", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
+        solrMetricsContext.gauge(null, () -> runningMinorMerges.get(), true, "running", SolrInfoBean.Category.INDEX.toString(), "merge", "minor");
+        solrMetricsContext.gauge(null, () -> runningMajorMergesDocs.get(), true, "running.docs", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
+        solrMetricsContext.gauge(null, () -> runningMinorMergesDocs.get(), true, "running.docs", SolrInfoBean.Category.INDEX.toString(), "merge", "minor");
+        solrMetricsContext.gauge(null, () -> runningMajorMergesSegments.get(), true, "running.segments", SolrInfoBean.Category.INDEX.toString(), "merge", "major");
+        solrMetricsContext.gauge(null, () -> runningMinorMergesSegments.get(), true, "running.segments", SolrInfoBean.Category.INDEX.toString(), "merge", "minor");
+        flushMeter = solrMetricsContext.meter(null, "flush", SolrInfoBean.Category.INDEX.toString());
       }
     }
   }
@@ -345,6 +341,9 @@
       if (directoryFactory != null) {
         directoryFactory.release(directory);
       }
+      if (solrMetricsContext != null) {
+        solrMetricsContext.unregister();
+      }
     }
   }
 
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateHandler.java b/solr/core/src/java/org/apache/solr/update/UpdateHandler.java
index c8dbc10..59dae8a 100644
--- a/solr/core/src/java/org/apache/solr/update/UpdateHandler.java
+++ b/solr/core/src/java/org/apache/solr/update/UpdateHandler.java
@@ -22,7 +22,6 @@ import java.util.Set;
 import java.util.Vector;
 import java.util.concurrent.ConcurrentHashMap;
 
-import com.codahale.metrics.MetricRegistry;
 import org.apache.solr.core.DirectoryFactory;
 import org.apache.solr.core.HdfsDirectoryFactory;
 import org.apache.solr.core.PluginInfo;
@@ -57,7 +56,6 @@ public abstract class UpdateHandler implements SolrInfoBean {
   protected final UpdateLog ulog;
 
   protected Set<String> metricNames = ConcurrentHashMap.newKeySet();
-  protected MetricRegistry registry;
 
   private void parseEventListeners() {
     final Class<SolrEventListener> clazz = SolrEventListener.class;
@@ -211,8 +209,4 @@ public abstract class UpdateHandler implements SolrInfoBean {
   public Set<String> getMetricNames() {
     return metricNames;
   }
-  @Override
-  public MetricRegistry getMetricRegistry() {
-    return registry;
-  }
 }
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateLog.java b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
index 612bc6e..dbef06e 100644
--- a/solr/core/src/java/org/apache/solr/update/UpdateLog.java
+++ b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
@@ -63,8 +63,8 @@ import org.apache.solr.common.util.TimeSource;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoBean;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestInfo;
@@ -250,8 +250,7 @@ public class UpdateLog implements PluginInfoInitialized, SolrMetricProducer {
   protected Meter applyingBufferedOpsMeter;
   protected Meter replayOpsMeter;
   protected Meter copyOverOldUpdatesMeter;
-  protected SolrMetricManager metricManager;
-  protected String registryName;
+  protected SolrMetricsContext solrMetricsContext;
 
   public static class LogPtr {
     final long pointer;
@@ -432,9 +431,8 @@ public class UpdateLog implements PluginInfoInitialized, SolrMetricProducer {
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
-    this.metricManager = manager;
-    this.registryName = registry;
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    solrMetricsContext = parentContext.getChildContext(this);
     bufferedOpsGauge = () -> {
       if (state == State.BUFFERING) {
         if (bufferTlog == null) return  0;
@@ -451,13 +449,13 @@ public class UpdateLog implements PluginInfoInitialized, SolrMetricProducer {
       }
     };
 
-    manager.registerGauge(null, registry, bufferedOpsGauge, tag, true, "ops", scope, "buffered");
-    manager.registerGauge(null, registry, () -> logs.size(), tag, true, "logs", scope, "replay", "remaining");
-    manager.registerGauge(null, registry, () -> getTotalLogsSize(), tag, true, "bytes", scope, "replay", "remaining");
-    applyingBufferedOpsMeter = manager.meter(null, registry, "ops", scope, "applyingBuffered");
-    replayOpsMeter = manager.meter(null, registry, "ops", scope, "replay");
-    copyOverOldUpdatesMeter = manager.meter(null, registry, "ops", scope, "copyOverOldUpdates");
-    manager.registerGauge(null, registry, () -> state.getValue(), tag, true, "state", scope);
+    solrMetricsContext.gauge(null, bufferedOpsGauge, true, "ops", scope, "buffered");
+    solrMetricsContext.gauge(null, () -> logs.size(), true, "logs", scope, "replay", "remaining");
+    solrMetricsContext.gauge(null, () -> getTotalLogsSize(), true, "bytes", scope, "replay", "remaining");
+    applyingBufferedOpsMeter = solrMetricsContext.meter(null, "ops", scope, "applyingBuffered");
+    replayOpsMeter = solrMetricsContext.meter(null, "ops", scope, "replay");
+    copyOverOldUpdatesMeter = solrMetricsContext.meter(null, "ops", scope, "copyOverOldUpdates");
+    solrMetricsContext.gauge(null, () -> state.getValue(), true, "state", scope);
   }
 
   /**
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java b/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java
index 8e3486b..fe966cb 100644
--- a/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java
+++ b/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java
@@ -25,7 +25,6 @@ import java.util.concurrent.SynchronousQueue;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.TimeUnit;
 
-import com.codahale.metrics.MetricRegistry;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.http.client.HttpClient;
 import org.apache.http.impl.client.CloseableHttpClient;
@@ -40,6 +39,7 @@ import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.security.HttpClientBuilderPlugin;
 import org.apache.solr.update.processor.DistributedUpdateProcessor;
 import org.apache.solr.update.processor.DistributingUpdateProcessorFactory;
@@ -90,7 +90,7 @@ public class UpdateShardHandler implements SolrMetricProducer, SolrInfoBean {
 
 
   private final Set<String> metricNames = ConcurrentHashMap.newKeySet();
-  private MetricRegistry registry;
+  private SolrMetricsContext solrMetricsContext;
 
   private int socketTimeout = HttpClientUtil.DEFAULT_SO_TIMEOUT;
   private int connectionTimeout = HttpClientUtil.DEFAULT_CONNECT_TIMEOUT;
@@ -179,14 +179,14 @@ public class UpdateShardHandler implements SolrMetricProducer, SolrInfoBean {
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    registry = manager.registry(registryName);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    solrMetricsContext = parentContext.getChildContext(this);
     String expandedScope = SolrMetricManager.mkName(scope, getCategory().name());
-    updateHttpListenerFactory.initializeMetrics(manager, registryName, tag, expandedScope);
-    defaultConnectionManager.initializeMetrics(manager, registryName, tag, expandedScope);
-    updateExecutor = MetricUtils.instrumentedExecutorService(updateExecutor, this, registry,
+    updateHttpListenerFactory.initializeMetrics(solrMetricsContext, expandedScope);
+    defaultConnectionManager.initializeMetrics(solrMetricsContext, expandedScope);
+    updateExecutor = MetricUtils.instrumentedExecutorService(updateExecutor, this, solrMetricsContext.getMetricRegistry(),
         SolrMetricManager.mkName("updateOnlyExecutor", expandedScope, "threadPool"));
-    recoveryExecutor = MetricUtils.instrumentedExecutorService(recoveryExecutor, this, registry,
+    recoveryExecutor = MetricUtils.instrumentedExecutorService(recoveryExecutor, this, solrMetricsContext.getMetricRegistry(),
         SolrMetricManager.mkName("recoveryExecutor", expandedScope, "threadPool"));
   }
 
@@ -206,8 +206,8 @@ public class UpdateShardHandler implements SolrMetricProducer, SolrInfoBean {
   }
 
   @Override
-  public MetricRegistry getMetricRegistry() {
-    return registry;
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
   }
 
   // if you are looking for a client to use, it's probably this one.
@@ -259,6 +259,11 @@ public class UpdateShardHandler implements SolrMetricProducer, SolrInfoBean {
     } catch (Exception e) {
       throw new RuntimeException(e);
     } finally {
+      try {
+        SolrMetricProducer.super.close();
+      } catch (Exception e) {
+        // ignore: unregistering metrics is best-effort during shutdown
+      }
       IOUtils.closeQuietly(updateOnlyClient);
       HttpClientUtil.close(recoveryOnlyClient);
       HttpClientUtil.close(defaultClient);
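
MetricUtils.instrumentedExecutorService wraps the update and recovery executors so task submissions and durations land in the registry now obtained via solrMetricsContext.getMetricRegistry(). The Dropwizard wrapper below is a close standalone analogue of what such instrumentation records (assumed equivalent semantics; it is not Solr's own helper):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    import com.codahale.metrics.InstrumentedExecutorService;
    import com.codahale.metrics.MetricRegistry;

    public class InstrumentedExecutorDemo {
      public static void main(String[] args) throws Exception {
        MetricRegistry registry = new MetricRegistry();
        ExecutorService updateExecutor = new InstrumentedExecutorService(
            Executors.newFixedThreadPool(2), registry, "updateOnlyExecutor");
        updateExecutor.submit(() -> {}).get();
        // Submission/running/completed meters and a duration timer now exist:
        System.out.println(registry.getMetrics().keySet());
        updateExecutor.shutdown();
      }
    }
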
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
index 5e04077..72021b1 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
@@ -1089,15 +1089,17 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
   }
 
   @Override
-  public void finish() throws IOException {
-    assertNotFinished();
+  public final void finish() throws IOException {
+    assert ! finished : "lifecycle sanity check";
+    finished = true;
+
+    doDistribFinish();
 
     super.finish();
   }
 
-  protected void assertNotFinished() {
-    assert ! finished : "lifecycle sanity check";
-    finished = true;
+  protected void doDistribFinish() throws IOException {
+    // no-op by default; distributed subclasses override to complete their work
   }
 
   /**
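
Making finish() final and pushing subclass work into doDistribFinish() is a template-method refactor: the lifecycle assertion runs exactly once in the base class, and DistributedZkUpdateProcessor (next file) only supplies the distributed completion step. The shape in miniature:

    public class TemplateFinishDemo {
      static class Base {
        private boolean finished = false;

        public final void finish() {
          assert !finished : "lifecycle sanity check"; // enforced once, here
          finished = true;
          doDistribFinish();
        }

        protected void doDistribFinish() {
          // no-op by default; distributed subclasses override
        }
      }

      static class ZkAware extends Base {
        @Override
        protected void doDistribFinish() {
          System.out.println("completing distributed work");
        }
      }

      public static void main(String[] args) {
        new ZkAware().finish();
      }
    }
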
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java
index a76b6be..569f877 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java
@@ -1045,17 +1045,11 @@
     super.processRollback(cmd);
   }
 
-  @Override
-  public void finish() throws IOException {
+  // TODO: optionally fail if n replicas are not reached...
+  @Override
+  protected void doDistribFinish() {
     clusterState = zkController.getClusterState();
 
-    assertNotFinished();
-
-    doFinish();
-  }
-
-  // TODO: optionally fail if n replicas are not reached...
-  private void doFinish() {
     boolean shouldUpdateTerms = isLeader && isIndexChanged;
     if (shouldUpdateTerms) {
       ZkShardTerms zkShardTerms = zkController.getShardTerms(cloudDesc.getCollectionName(), cloudDesc.getShardId());
@@ -1195,6 +1189,7 @@
     if (0 < errorsForClient.size()) {
       throw new DistributedUpdatesAsyncException(errorsForClient);
     }
+
   }
 
   /**
diff --git a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
index eb3c08b..5ddaf8b 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
@@ -23,6 +23,7 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.stream.Collectors;
 
 import com.google.common.collect.ImmutableMap;
 import org.apache.solr.common.SolrException;
@@ -33,9 +34,12 @@ import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.PluginBag;
 import org.apache.solr.core.PluginInfo;
+import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
+import org.apache.solr.pkg.PackagePluginHolder;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.update.processor.UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder.LazyUpdateRequestProcessorFactory;
 import org.apache.solr.util.plugin.PluginInfoInitialized;
 import org.apache.solr.util.plugin.SolrCoreAware;
 import org.slf4j.Logger;
@@ -126,8 +130,7 @@ public final class UpdateRequestProcessorChain implements PluginInfoInitialized
 
     // wrap in an ArrayList so we know we can do fast index lookups
     // and that add(int,Object) is supported
-    List<UpdateRequestProcessorFactory> list = new ArrayList<>
-      (solrCore.initPlugins(info.getChildren("processor"),UpdateRequestProcessorFactory.class,null));
+    List<UpdateRequestProcessorFactory> list = createProcessors(info);
 
     if(list.isEmpty()){
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
@@ -170,6 +173,23 @@ public final class UpdateRequestProcessorChain implements PluginInfoInitialized
 
   }
 
+  private List<UpdateRequestProcessorFactory> createProcessors(PluginInfo info) {
+    List<PluginInfo> processors = info.getChildren("processor");
+    return processors.stream().map(it -> {
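+      // processors that declare a package are wrapped in a PackagePluginHolder so they reload when their package is updated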
+      if(it.pkgName == null){
+        return solrCore.createInitInstance(it, UpdateRequestProcessorFactory.class,
+            UpdateRequestProcessorFactory.class.getSimpleName(), null);
+
+      } else {
+        return new LazyUpdateRequestProcessorFactory(new PackagePluginHolder(
+            it,
+            solrCore,
+            SolrConfig.classVsSolrPluginInfo.get(UpdateRequestProcessorFactory.class.getName())));
+      }
+    }).collect(Collectors.toList());
+  }
+
+
   /**
    * Creates a chain backed directly by the specified list. Modifications to
    * the array will affect future calls to <code>createProcessor</code>
@@ -328,7 +348,7 @@ public final class UpdateRequestProcessorChain implements PluginInfoInitialized
   public static class LazyUpdateProcessorFactoryHolder extends PluginBag.PluginHolder<UpdateRequestProcessorFactory> {
     private volatile UpdateRequestProcessorFactory lazyFactory;
 
-    public LazyUpdateProcessorFactoryHolder(final PluginBag.LazyPluginHolder holder) {
+    public LazyUpdateProcessorFactoryHolder(final PluginBag.PluginHolder holder) {
       super(holder.getPluginInfo());
       lazyFactory = new LazyUpdateRequestProcessorFactory(holder);
     }
@@ -339,27 +359,20 @@ public final class UpdateRequestProcessorChain implements PluginInfoInitialized
       return lazyFactory;
     }
 
-    public class LazyUpdateRequestProcessorFactory extends UpdateRequestProcessorFactory {
-      private final PluginBag.LazyPluginHolder holder;
-      UpdateRequestProcessorFactory delegate;
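+    // resolves the factory through the holder on each call; the holder caches the instance and can swap it on package reload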
+    public static class LazyUpdateRequestProcessorFactory extends UpdateRequestProcessorFactory {
+      private final PluginBag.PluginHolder<UpdateRequestProcessorFactory> holder;
 
-      public LazyUpdateRequestProcessorFactory(PluginBag.LazyPluginHolder holder) {
+      public LazyUpdateRequestProcessorFactory(PluginBag.PluginHolder holder) {
         this.holder = holder;
       }
 
       public UpdateRequestProcessorFactory getDelegate() {
-        return delegate;
+        return holder.get();
       }
 
       @Override
       public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) {
-        if (delegate != null) return delegate.getInstance(req, rsp, next);
-
-        synchronized (this) {
-          if (delegate == null)
-            delegate = (UpdateRequestProcessorFactory) holder.get();
-        }
-        return delegate.getInstance(req, rsp, next);
+        return holder.get().getInstance(req, rsp, next);
       }
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 3eddf06..3feb0da 100755
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -116,6 +116,7 @@ import org.apache.solr.client.solrj.response.CollectionAdminResponse;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.cloud.autoscaling.sim.NoopDistributedQueueFactory;
 import org.apache.solr.cloud.autoscaling.sim.SimCloudManager;
+import org.apache.solr.cloud.autoscaling.sim.SimScenario;
 import org.apache.solr.cloud.autoscaling.sim.SimUtils;
 import org.apache.solr.cloud.autoscaling.sim.SnapshotCloudManager;
 import org.apache.solr.common.MapWriter;
@@ -932,12 +933,17 @@ public class SolrCLI implements CLIO {
               .withLongOpt("iterations")
               .create("i"),
           OptionBuilder
-              .withDescription("Save autoscaling shapshots at each step of simulated execution.")
+              .withDescription("Save autoscaling snapshots at each step of simulated execution.")
               .withArgName("DIR")
               .withLongOpt("saveSimulated")
               .hasArg()
               .create("ss"),
           OptionBuilder
+              .withDescription("Execute a scenario from a file (and ignore all other options).")
+              .withArgName("FILE")
+              .hasArg()
+              .create("scenario"),
+          OptionBuilder
               .withDescription("Turn on all options to get all available information.")
               .create("all")
 
@@ -951,6 +957,15 @@ public class SolrCLI implements CLIO {
 
     protected void runImpl(CommandLine cli) throws Exception {
       raiseLogLevelUnlessVerbose(cli);
+      if (cli.hasOption("scenario")) {
+        String data = IOUtils.toString(new FileInputStream(cli.getOptionValue("scenario")), "UTF-8");
+        try (SimScenario scenario = SimScenario.load(data)) {
+          scenario.verbose = verbose;
+          scenario.console = CLIO.getOutStream();
+          scenario.run();
+        }
+        return;
+      }
       SnapshotCloudManager cloudManager;
       AutoScalingConfig config = null;
       String configFile = cli.getOptionValue("a");
diff --git a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedHttpListenerFactory.java b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedHttpListenerFactory.java
index d452502..c3bc3e5 100644
--- a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedHttpListenerFactory.java
+++ b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedHttpListenerFactory.java
@@ -21,11 +21,10 @@ import java.util.HashMap;
 import java.util.Locale;
 import java.util.Map;
 
-import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
 import org.apache.solr.client.solrj.impl.HttpListenerFactory;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.eclipse.jetty.client.api.Request;
 import org.eclipse.jetty.client.api.Result;
 
@@ -64,9 +63,7 @@ public class InstrumentedHttpListenerFactory implements SolrMetricProducer, Http
     KNOWN_METRIC_NAME_STRATEGIES.put("methodOnly", METHOD_ONLY);
   }
 
-  protected MetricRegistry metricsRegistry;
-  protected SolrMetricManager metricManager;
-  protected String registryName;
+  protected SolrMetricsContext solrMetricsContext;
   protected String scope;
   protected NameStrategy nameStrategy;
 
@@ -85,7 +82,7 @@ public class InstrumentedHttpListenerFactory implements SolrMetricProducer, Http
 
       @Override
       public void onBegin(Request request) {
-        if (metricsRegistry != null) {
+        if (solrMetricsContext != null) {
           timerContext = timer(request).time();
         }
       }
@@ -100,14 +97,12 @@ public class InstrumentedHttpListenerFactory implements SolrMetricProducer, Http
   }
 
   private Timer timer(Request request) {
-    return metricsRegistry.timer(nameStrategy.getNameFor(scope, request));
+    return solrMetricsContext.timer(null, nameStrategy.getNameFor(scope, request));
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
-    this.metricManager = manager;
-    this.registryName = registry;
-    this.metricsRegistry = manager.registry(registry);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    this.solrMetricsContext = parentContext;
     this.scope = scope;
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedHttpRequestExecutor.java b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedHttpRequestExecutor.java
index 246777c..4adbe68 100644
--- a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedHttpRequestExecutor.java
+++ b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedHttpRequestExecutor.java
@@ -23,7 +23,6 @@ import java.util.HashMap;
 import java.util.Locale;
 import java.util.Map;
 
-import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
 import org.apache.http.HttpClientConnection;
 import org.apache.http.HttpException;
@@ -34,8 +33,8 @@ import org.apache.http.client.methods.HttpRequestWrapper;
 import org.apache.http.client.utils.URIBuilder;
 import org.apache.http.protocol.HttpContext;
 import org.apache.http.protocol.HttpRequestExecutor;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 
 import static org.apache.solr.metrics.SolrMetricManager.mkName;
 
@@ -91,11 +90,9 @@ public class InstrumentedHttpRequestExecutor extends HttpRequestExecutor impleme
     KNOWN_METRIC_NAME_STRATEGIES.put("methodOnly", METHOD_ONLY);
   }
 
-  protected MetricRegistry metricsRegistry;
-  protected SolrMetricManager metricManager;
-  protected String registryName;
   protected String scope;
   protected HttpClientMetricNameStrategy nameStrategy;
+  protected SolrMetricsContext solrMetricsContext;
 
   public InstrumentedHttpRequestExecutor(int waitForContinue, HttpClientMetricNameStrategy nameStrategy) {
     super(waitForContinue);
@@ -111,9 +108,14 @@ public class InstrumentedHttpRequestExecutor extends HttpRequestExecutor impleme
   }
 
   @Override
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
+  }
+
+  @Override
   public HttpResponse execute(HttpRequest request, HttpClientConnection conn, HttpContext context) throws IOException, HttpException {
     Timer.Context timerContext = null;
-    if (metricsRegistry != null) {
+    if (solrMetricsContext != null) {
       timerContext = timer(request).time();
     }
     try {
@@ -126,14 +128,12 @@ public class InstrumentedHttpRequestExecutor extends HttpRequestExecutor impleme
   }
 
   private Timer timer(HttpRequest request) {
-    return metricsRegistry.timer(nameStrategy.getNameFor(scope, request));
+    return solrMetricsContext.timer(null, nameStrategy.getNameFor(scope, request));
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
-    this.metricManager = manager;
-    this.registryName = registry;
-    this.metricsRegistry = manager.registry(registry);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    this.solrMetricsContext = parentContext.getChildContext(this);
     this.scope = scope;
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java
index 398ab8b..c7397ba 100644
--- a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java
+++ b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java
@@ -22,6 +22,7 @@ import org.apache.http.conn.socket.ConnectionSocketFactory;
 import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 
 /**
  * Sub-class of PoolingHttpClientConnectionManager which tracks metrics interesting to Solr.
@@ -29,25 +30,28 @@ import org.apache.solr.metrics.SolrMetricProducer;
  */
 public class InstrumentedPoolingHttpClientConnectionManager extends PoolingHttpClientConnectionManager implements SolrMetricProducer {
 
-  private SolrMetricManager metricManager;
-  private String registryName;
+  private SolrMetricsContext solrMetricsContext;
 
   public InstrumentedPoolingHttpClientConnectionManager(Registry<ConnectionSocketFactory> socketFactoryRegistry) {
     super(socketFactoryRegistry);
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
-    this.metricManager = manager;
-    this.registryName = registry;
-    manager.registerGauge(null, registry, () -> getTotalStats().getAvailable(),
-        tag, true, SolrMetricManager.mkName("availableConnections", scope));
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
+  }
+
+  @Override
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    this.solrMetricsContext = parentContext.getChildContext(this);
+    parentContext.gauge(null, () -> getTotalStats().getAvailable(),
+        true, SolrMetricManager.mkName("availableConnections", scope));
     // this acquires a lock on the connection pool; remove if contention sucks
-    manager.registerGauge(null, registry, () -> getTotalStats().getLeased(),
-        tag, true, SolrMetricManager.mkName("leasedConnections", scope));
-    manager.registerGauge(null, registry, () -> getTotalStats().getMax(),
-        tag, true, SolrMetricManager.mkName("maxConnections", scope));
-    manager.registerGauge(null, registry, () -> getTotalStats().getPending(),
-        tag, true, SolrMetricManager.mkName("pendingConnections", scope));
+    parentContext.gauge(null, () -> getTotalStats().getLeased(),
+        true, SolrMetricManager.mkName("leasedConnections", scope));
+    parentContext.gauge(null, () -> getTotalStats().getMax(),
+        true, SolrMetricManager.mkName("maxConnections", scope));
+    parentContext.gauge(null, () -> getTotalStats().getPending(),
+        true, SolrMetricManager.mkName("pendingConnections", scope));
   }
 }
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema.xml b/solr/core/src/test-files/solr/collection1/conf/schema.xml
index 9f46cec..d5cf090 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema.xml
@@ -841,6 +841,11 @@
    <dynamicField name="*_ds_ni_p"   type="pdouble"    indexed="false"  stored="true" docValues="true" multiValued="true"/>
    <dynamicField name="*_sortable"  type="sortable_text" indexed="true" multiValued="false" stored="true" />
 
+  <dynamicField name="*_date_p"      type="pdate"    indexed="true"  stored="true" docValues="true" multiValued="false"/>
+  <dynamicField name="*_dates_p"     type="pdate"    indexed="true"  stored="true" docValues="true" multiValued="true"/>
+  <dynamicField name="*_date_ni_p"   type="pdate"    indexed="false"  stored="true" docValues="true" multiValued="false"/>
+  <dynamicField name="*_dates_ni_p"  type="pdate"    indexed="false"  stored="true" docValues="true" multiValued="true"/>
+
   <copyField source="single_i_dvn" dest="copy_single_i_dvn"/>
   <copyField source="single_d_dvn" dest="copy_single_d_dvn"/>
   <copyField source="single_s_dvn" dest="copy_single_s_dvn"/>
diff --git a/solr/core/src/test-files/solr/security/auditlog_plugin_security.json b/solr/core/src/test-files/solr/security/auditlog_plugin_security.json
index 254e616..750ac5e 100644
--- a/solr/core/src/test-files/solr/security/auditlog_plugin_security.json
+++ b/solr/core/src/test-files/solr/security/auditlog_plugin_security.json
@@ -3,7 +3,7 @@
     "class": "solr.CallbackAuditLoggerPlugin",
     "callbackPort": "_PORT_",
     "async": _ASYNC_,
-    "delay": "_DELAY_",
+    "semaphore": _SEMAPHORE_,
     "muteRules": _MUTERULES_
   },_AUTH_
-}
\ No newline at end of file
+}
diff --git a/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java b/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java
index 57a7d87..6380db2 100644
--- a/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java
+++ b/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java
@@ -24,6 +24,7 @@ import org.apache.solr.handler.component.SearchHandler;
 import org.apache.solr.highlight.DefaultSolrHighlighter;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.search.LRUCache;
 import org.junit.BeforeClass;
 import java.io.File;
@@ -59,13 +60,14 @@ public class SolrInfoBeanTest extends SolrTestCaseJ4
     int checked = 0;
     SolrMetricManager metricManager = h.getCoreContainer().getMetricManager();
     String registry = h.getCore().getCoreMetricManager().getRegistryName();
+    SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, registry, "foo");
     String scope = TestUtil.randomSimpleString(random(), 2, 10);
     for( Class clazz : classes ) {
       if( SolrInfoBean.class.isAssignableFrom( clazz ) ) {
         try {
           SolrInfoBean info = (SolrInfoBean)clazz.getConstructor().newInstance();
           if (info instanceof SolrMetricProducer) {
-            ((SolrMetricProducer)info).initializeMetrics(metricManager, registry, "foo", scope);
+            ((SolrMetricProducer)info).initializeMetrics(solrMetricsContext, scope);
           }
           
           //System.out.println( info.getClass() );
diff --git a/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java b/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
index d16fbbe..1cba72f 100644
--- a/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
+++ b/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
@@ -57,8 +57,8 @@ public class TestDistributedGrouping extends BaseDistributedSearchTestCase {
   String s1dv = "a_s_dvo";
   String b1dv = "a_b_dvo";
   String tlong = "other_tl1";
-  String tdate_a = "a_n_tdt";
-  String tdate_b = "b_n_tdt";
+  String tdate_a = "a_n_tdt1"; // use single-valued date field
+  String tdate_b = "b_n_tdt1";
   String oddField="oddField_s1";
 
   @Test
@@ -248,6 +248,9 @@ public class TestDistributedGrouping extends BaseDistributedSearchTestCase {
         "group.query", t1 + ":kings OR " + t1 + ":eggs", "rows", "13", "start", "2",
         "fl", "id", "group.main", "true", "sort", i1 + " asc, id asc");
 
+    // SOLR-9802
+    query("q", "*:*", "group", "true", "group.field", tdate_a, "sort", i1 + " asc, id asc", "fl", "id");
+
     // SOLR-3109
     query("q", t1 + ":eggs", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", tlong + " asc, id asc");
     query("q", i1 + ":232", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", tlong + " asc, id asc");
diff --git a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
index b6460f6..d885484 100644
--- a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
+++ b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
@@ -34,6 +34,7 @@ import org.apache.solr.client.solrj.impl.BinaryResponseParser;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.GroupParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.index.LogDocMergePolicyFactory;
 import org.apache.solr.request.SolrQueryRequest;
@@ -699,6 +700,30 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
   }
 
   @Test
+  public void testGroupingOnDateField() throws Exception {
+    assertU(add(doc("id", "1",  "date_dt", "2012-11-20T00:00:00Z")));
+    assertU(add(doc("id", "2",  "date_dt", "2012-11-21T00:00:00Z")));
+    assertU(commit());
+
+    assertU(add(doc("id", "3",  "date_dt", "2012-11-20T00:00:00Z")));
+    assertU(add(doc("id", "4",  "date_dt", "2013-01-15T00:00:00Z")));
+    assertU(add(doc("id", "5")));
+    assertU(commit());
+
+    ModifiableSolrParams params = params("q", "*:*", "group.limit", "10",
+        "group", "true", "fl", "id", "group.ngroups", "true");
+
+    assertJQ(req(params, "group.field", "date_dt", "sort", "id asc"),
+        "/grouped=={'date_dt':{'matches':5,'ngroups':4, 'groups':" +
+            "[{'groupValue':'2012-11-20T00:00:00Z','doclist':{'numFound':2,'start':0,'docs':[{'id':'1'},{'id':'3'}]}}," +
+            "{'groupValue':'2012-11-21T00:00:00Z','doclist':{'numFound':1,'start':0,'docs':[{'id':'2'}]}}," +
+            "{'groupValue':'2013-01-15T00:00:00Z','doclist':{'numFound':1,'start':0,'docs':[{'id':'4'}]}}," +
+            "{'groupValue':null,'doclist':{'numFound':1,'start':0,'docs':[{'id':'5'}]}}" +
+            "]}}"
+    );
+  }
+
+  @Test
   public void testRandomGrouping() throws Exception {
     /**
      updateJ("{\"add\":{\"doc\":{\"id\":\"77\"}}}", params("commit","true"));
diff --git a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
index 843b238..025460c 100644
--- a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
@@ -81,7 +81,7 @@ public class MoveReplicaTest extends SolrCloudTestCase {
       fail("no overseer leader!");
     }
   }
-  
+
   @After
   public void afterTest() throws Exception {
     try {
@@ -100,7 +100,9 @@ public class MoveReplicaTest extends SolrCloudTestCase {
 
     CloudSolrClient cloudClient = cluster.getSolrClient();
 
-    CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(coll, "conf1", 2, REPLICATION);
+    // randomly create a tlog or pull replica alongside the nrt replica
+    boolean isTlog = random().nextBoolean();
+    CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(coll, "conf1", 2, 1, isTlog ? 1 : 0, !isTlog ? 1 : 0);
     create.setMaxShardsPerNode(2);
     create.setAutoAddReplicas(false);
     cloudClient.request(create);
@@ -126,8 +128,8 @@ public class MoveReplicaTest extends SolrCloudTestCase {
       }
     }
 
-    int sourceNumCores = getNumOfCores(cloudClient, replica.getNodeName(), coll);
-    int targetNumCores = getNumOfCores(cloudClient, targetNode, coll);
+    int sourceNumCores = getNumOfCores(cloudClient, replica.getNodeName(), coll, replica.getType().name());
+    int targetNumCores = getNumOfCores(cloudClient, targetNode, coll, replica.getType().name());
 
     CollectionAdminRequest.MoveReplica moveReplica = createMoveReplicaRequest(coll, replica, targetNode);
     moveReplica.setInPlaceMove(inPlaceMove);
@@ -146,8 +148,8 @@ public class MoveReplicaTest extends SolrCloudTestCase {
       Thread.sleep(500);
     }
     assertTrue(success);
-    assertEquals("should be one less core on the source node!", sourceNumCores - 1, getNumOfCores(cloudClient, replica.getNodeName(), coll));
-    assertEquals("should be one more core on target node!", targetNumCores + 1, getNumOfCores(cloudClient, targetNode, coll));
+    assertEquals("should be one less core on the source node!", sourceNumCores - 1, getNumOfCores(cloudClient, replica.getNodeName(), coll, replica.getType().name()));
+    assertEquals("should be one more core on target node!", targetNumCores + 1, getNumOfCores(cloudClient, targetNode, coll, replica.getType().name()));
     // wait for recovery
     boolean recovered = false;
     for (int i = 0; i < 300; i++) {
@@ -230,6 +232,7 @@ public class MoveReplicaTest extends SolrCloudTestCase {
 
     assertEquals(100, cluster.getSolrClient().query(coll, new SolrQuery("*:*")).getResults().getNumFound());
   }
+
   //Commented out 5-Dec-2017
   // @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-11458")
   @Test
@@ -242,7 +245,9 @@ public class MoveReplicaTest extends SolrCloudTestCase {
 
     CloudSolrClient cloudClient = cluster.getSolrClient();
 
-    CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(coll, "conf1", 2, REPLICATION);
+    // randomly create a tlog or pull replica alongside the nrt replica
+    boolean isTlog = random().nextBoolean();
+    CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(coll, "conf1", 2, 1, isTlog ? 1 : 0, !isTlog ? 1 : 0);
     create.setAutoAddReplicas(false);
     cloudClient.request(create);
 
@@ -315,28 +320,40 @@ public class MoveReplicaTest extends SolrCloudTestCase {
   }
 
   private void checkNumOfCores(CloudSolrClient cloudClient, String nodeName, String collectionName, int expectedCores) throws IOException, SolrServerException {
-    assertEquals(nodeName + " does not have expected number of cores",expectedCores, getNumOfCores(cloudClient, nodeName, collectionName));
+    assertEquals(nodeName + " does not have expected number of cores", expectedCores, getNumOfCores(cloudClient, nodeName, collectionName));
   }
 
   private int getNumOfCores(CloudSolrClient cloudClient, String nodeName, String collectionName) throws IOException, SolrServerException {
+    return getNumOfCores(cloudClient, nodeName, collectionName, null);
+  }
+
+  private int getNumOfCores(CloudSolrClient cloudClient, String nodeName, String collectionName, String replicaType) throws IOException, SolrServerException {
     try (HttpSolrClient coreclient = getHttpSolrClient(cloudClient.getZkStateReader().getBaseUrlForNodeName(nodeName))) {
       CoreAdminResponse status = CoreAdminRequest.getStatus(null, coreclient);
       if (status.getCoreStatus().size() == 0) {
         return 0;
       }
-      // filter size by collection name
-      if (collectionName == null) {
+      if (collectionName == null && replicaType == null) {
         return status.getCoreStatus().size();
-      } else {
-        int size = 0;
-        for (Map.Entry<String, NamedList<Object>> stringNamedListEntry : status.getCoreStatus()) {
+      }
+      // filter size by collection name
+      int size = 0;
+      for (Map.Entry<String, NamedList<Object>> stringNamedListEntry : status.getCoreStatus()) {
+        if (collectionName != null) {
           String coll = (String) stringNamedListEntry.getValue().findRecursive("cloud", "collection");
-          if (collectionName.equals(coll)) {
-            size++;
+          if (!collectionName.equals(coll)) {
+            continue;
+          }
+        }
+        if (replicaType != null) {
+          String type = (String) stringNamedListEntry.getValue().findRecursive("cloud", "replicaType");
+          if (!replicaType.equals(type)) {
+            continue;
           }
         }
-        return size;
+        size++;
       }
+      return size;
     }
   }
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLRUStatsCacheCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestLRUStatsCacheCloud.java
index e7ae992..6a51000 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestLRUStatsCacheCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestLRUStatsCacheCloud.java
@@ -21,7 +21,8 @@ import org.apache.solr.search.stats.LRUStatsCache;
 /**
  *
  */
-public class TestLRUStatsCacheCloud extends TestBaseStatsCacheCloud {
+public class TestLRUStatsCacheCloud extends TestBaseStatsCacheCloud {
   @Override
   protected boolean assertSameScores() {
     return true;
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java b/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java
index 763cdd4..1cd70f4 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java
@@ -18,14 +18,12 @@ package org.apache.solr.cloud;
 
 import java.lang.invoke.MethodHandles;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.impl.Http2SolrClient;
-import org.apache.solr.client.solrj.impl.PreemptiveBasicAuthClientBuilderFactory;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.request.UpdateRequest;
@@ -45,10 +43,6 @@ public class TestQueryingOnDownCollection extends SolrCloudTestCase {
   private static final String USERNAME = "solr";
   private static final String PASSWORD = "solr";
 
-  static {
-    System.setProperty("basicauth", String.format(Locale.ROOT,"{}:{}", USERNAME, PASSWORD));
-  }
-
   @BeforeClass
   public static void setupCluster() throws Exception {
     configureCluster(3)
@@ -107,8 +101,6 @@ public class TestQueryingOnDownCollection extends SolrCloudTestCase {
     // run same set of tests on v2 client which uses V2HttpCall
     Http2SolrClient v2Client = new Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString())
         .build();
-    PreemptiveBasicAuthClientBuilderFactory factory = new PreemptiveBasicAuthClientBuilderFactory();
-    factory.setup(v2Client);
 
     error = expectThrows(SolrException.class,
         "Request should fail after trying all replica nodes once",
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java
index 030722f..8f5b94f 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java
@@ -39,7 +39,6 @@ import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.entity.StringEntity;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -76,7 +75,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @Slow
-@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-12313")
 public class TestTlogReplica extends SolrCloudTestCase {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -90,6 +88,7 @@ public class TestTlogReplica extends SolrCloudTestCase {
 
   @BeforeClass
   public static void setupCluster() throws Exception {
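+    // lower solr.waitToSeeReplicasInStateTimeoutSeconds so tests that stop nodes don't wait out the longer default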
+    System.setProperty("solr.waitToSeeReplicasInStateTimeoutSeconds", "30");
     configureCluster(2) // 2 + random().nextInt(3)
         .addConfig("conf", configset("cloud-minimal-inplace-updates"))
         .configure();
@@ -149,80 +148,75 @@ public class TestTlogReplica extends SolrCloudTestCase {
   }
 
   @Repeat(iterations=2) // 2 times to make sure cleanup is complete and we can create the same collection
-  // commented out on: 17-Feb-2019   @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 09-Aug-2018
   public void testCreateDelete() throws Exception {
-    try {
-      switch (random().nextInt(3)) {
-        case 0:
-          CollectionAdminRequest.createCollection(collectionName, "conf", 2, 0, 4, 0)
-          .setMaxShardsPerNode(100)
-          .process(cluster.getSolrClient());
-          cluster.waitForActiveCollection(collectionName, 2, 8);
-          break;
-        case 1:
-          // Sometimes don't use SolrJ
-          String url = String.format(Locale.ROOT, "%s/admin/collections?action=CREATE&name=%s&collection.configName=%s&numShards=%s&tlogReplicas=%s&maxShardsPerNode=%s",
-              cluster.getRandomJetty(random()).getBaseUrl(),
-              collectionName, "conf",
-              2,    // numShards
-              4,    // tlogReplicas
-              100); // maxShardsPerNode
-          HttpGet createCollectionGet = new HttpGet(url);
-          HttpResponse httpResponse = cluster.getSolrClient().getHttpClient().execute(createCollectionGet);
-          assertEquals(200, httpResponse.getStatusLine().getStatusCode());
-          cluster.waitForActiveCollection(collectionName, 2, 8);
-          break;
-        case 2:
-          // Sometimes use V2 API
-          url = cluster.getRandomJetty(random()).getBaseUrl().toString() + "/____v2/c";
-          String requestBody = String.format(Locale.ROOT, "{create:{name:%s, config:%s, numShards:%s, tlogReplicas:%s, maxShardsPerNode:%s}}",
-              collectionName, "conf",
-              2,    // numShards
-              4,    // tlogReplicas
-              100); // maxShardsPerNode
-          HttpPost createCollectionPost = new HttpPost(url);
-          createCollectionPost.setHeader("Content-type", "application/json");
-          createCollectionPost.setEntity(new StringEntity(requestBody));
-          httpResponse = cluster.getSolrClient().getHttpClient().execute(createCollectionPost);
-          assertEquals(200, httpResponse.getStatusLine().getStatusCode());
-          cluster.waitForActiveCollection(collectionName, 2, 8);
-          break;
-      }
+    switch (random().nextInt(3)) {
+      case 0:
+        CollectionAdminRequest.createCollection(collectionName, "conf", 2, 0, 4, 0)
+        .setMaxShardsPerNode(100)
+        .process(cluster.getSolrClient());
+        cluster.waitForActiveCollection(collectionName, 2, 8);
+        break;
+      case 1:
+        // Sometimes don't use SolrJ
+        String url = String.format(Locale.ROOT, "%s/admin/collections?action=CREATE&name=%s&collection.configName=%s&numShards=%s&tlogReplicas=%s&maxShardsPerNode=%s",
+            cluster.getRandomJetty(random()).getBaseUrl(),
+            collectionName, "conf",
+            2,    // numShards
+            4,    // tlogReplicas
+            100); // maxShardsPerNode
+        HttpGet createCollectionGet = new HttpGet(url);
+        HttpResponse httpResponse = cluster.getSolrClient().getHttpClient().execute(createCollectionGet);
+        assertEquals(200, httpResponse.getStatusLine().getStatusCode());
+        cluster.waitForActiveCollection(collectionName, 2, 8);
+        break;
+      case 2:
+        // Sometimes use V2 API
+        url = cluster.getRandomJetty(random()).getBaseUrl().toString() + "/____v2/c";
+        String requestBody = String.format(Locale.ROOT, "{create:{name:%s, config:%s, numShards:%s, tlogReplicas:%s, maxShardsPerNode:%s}}",
+            collectionName, "conf",
+            2,    // numShards
+            4,    // tlogReplicas
+            100); // maxShardsPerNode
+        HttpPost createCollectionPost = new HttpPost(url);
+        createCollectionPost.setHeader("Content-type", "application/json");
+        createCollectionPost.setEntity(new StringEntity(requestBody));
+        httpResponse = cluster.getSolrClient().getHttpClient().execute(createCollectionPost);
+        assertEquals(200, httpResponse.getStatusLine().getStatusCode());
+        cluster.waitForActiveCollection(collectionName, 2, 8);
+        break;
+    }
 
-      boolean reloaded = false;
-      while (true) {
-        DocCollection docCollection = getCollectionState(collectionName);
-        assertNotNull(docCollection);
-        assertEquals("Expecting 2 shards",
-            2, docCollection.getSlices().size());
-        assertEquals("Expecting 4 relpicas per shard",
-            8, docCollection.getReplicas().size());
-        assertEquals("Expecting 8 tlog replicas, 4 per shard",
-            8, docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)).size());
-        assertEquals("Expecting no nrt replicas",
-            0, docCollection.getReplicas(EnumSet.of(Replica.Type.NRT)).size());
-        assertEquals("Expecting no pull replicas",
-            0, docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).size());
-        for (Slice s:docCollection.getSlices()) {
-          assertTrue(s.getLeader().getType() == Replica.Type.TLOG);
-          List<String> shardElectionNodes = cluster.getZkClient().getChildren(ZkStateReader.getShardLeadersElectPath(collectionName, s.getName()), null, true);
-          assertEquals("Unexpected election nodes for Shard: " + s.getName() + ": " + Arrays.toString(shardElectionNodes.toArray()),
-              4, shardElectionNodes.size());
-        }
-        assertUlogPresence(docCollection);
-        if (reloaded) {
-          break;
-        } else {
-          // reload
-          CollectionAdminResponse response = CollectionAdminRequest.reloadCollection(collectionName)
-          .process(cluster.getSolrClient());
-          assertEquals(0, response.getStatus());
-          waitForState("failed waiting for active colletion", collectionName, clusterShape(2, 8));
-          reloaded = true;
-        }
+    boolean reloaded = false;
+    while (true) {
+      DocCollection docCollection = getCollectionState(collectionName);
+      assertNotNull(docCollection);
+      assertEquals("Expecting 2 shards",
+          2, docCollection.getSlices().size());
+      assertEquals("Expecting 4 relpicas per shard",
+          8, docCollection.getReplicas().size());
+      assertEquals("Expecting 8 tlog replicas, 4 per shard",
+          8, docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)).size());
+      assertEquals("Expecting no nrt replicas",
+          0, docCollection.getReplicas(EnumSet.of(Replica.Type.NRT)).size());
+      assertEquals("Expecting no pull replicas",
+          0, docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).size());
+      for (Slice s:docCollection.getSlices()) {
+        assertTrue(s.getLeader().getType() == Replica.Type.TLOG);
+        List<String> shardElectionNodes = cluster.getZkClient().getChildren(ZkStateReader.getShardLeadersElectPath(collectionName, s.getName()), null, true);
+        assertEquals("Unexpected election nodes for Shard: " + s.getName() + ": " + Arrays.toString(shardElectionNodes.toArray()),
+            4, shardElectionNodes.size());
+      }
+      assertUlogPresence(docCollection);
+      if (reloaded) {
+        break;
+      } else {
+        // reload
+        CollectionAdminResponse response = CollectionAdminRequest.reloadCollection(collectionName)
+        .process(cluster.getSolrClient());
+        assertEquals(0, response.getStatus());
+        waitForState("failed waiting for active colletion", collectionName, clusterShape(2, 8));
+        reloaded = true;
       }
-    } finally {
-      zkClient().printLayoutToStream(System.out);
     }
   }
 
@@ -409,17 +403,8 @@ public class TestTlogReplica extends SolrCloudTestCase {
     docCollection = assertNumberOfReplicas(0, 1, 0, true, true);
 
     // Wait until a new leader is elected
-    TimeOut t = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME);
-    while (!t.hasTimedOut()) {
-      docCollection = getCollectionState(collectionName);
-      Replica leader = docCollection.getSlice("shard1").getLeader();
-      if (leader != null && leader.isActive(cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())) {
-        break;
-      }
-      Thread.sleep(500);
-    }
-    assertFalse("Timeout waiting for a new leader to be elected", t.hasTimedOut());
-
+    waitForLeaderChange(leaderJetty, "shard1");
+
     // There is a new leader, I should be able to add and commit
     cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", "2", "foo", "zoo"));
     cluster.getSolrClient().commit(collectionName);
@@ -428,7 +413,8 @@ public class TestTlogReplica extends SolrCloudTestCase {
     waitForNumDocsInAllReplicas(2, docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)), REPLICATION_TIMEOUT_SECS);
     // Start back the node
     if (removeReplica) {
-      CollectionAdminRequest.addReplicaToShard(collectionName, "shard1", Replica.Type.TLOG).process(cluster.getSolrClient());
+      addReplicaWithRetries();
+
     } else {
       leaderJetty.start();
     }
@@ -437,6 +423,22 @@ public class TestTlogReplica extends SolrCloudTestCase {
     waitForNumDocsInAllReplicas(2, docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)), REPLICATION_TIMEOUT_SECS);
   }
 
+  private void addReplicaWithRetries() throws SolrServerException, IOException {
+    int maxAttempts = 3;
+    for (int i = 0; i < maxAttempts; i++) {
+      try {
+        CollectionAdminResponse response = CollectionAdminRequest.addReplicaToShard(collectionName, "shard1", Replica.Type.TLOG).process(cluster.getSolrClient());
+        // This is an unfortunate hack: ADDREPLICA occasionally fails and a separate Jira will track the root cause; for now, we retry.
+        if (response.isSuccess()) {
+          break;
+        }
+        log.error("Unsuccessful attempt to add replica. Attempt: {}/{}", i + 1, maxAttempts);
+      } catch (SolrException e) {
+        log.error("Exception while adding replica. Attempt: {}/{}", i + 1, maxAttempts, e);
+      }
+    }
+  }
+
   public void testKillTlogReplica() throws Exception {
     DocCollection docCollection = createAndWaitForCollection(1, 0, 2, 0);
 
@@ -448,6 +450,7 @@ public class TestTlogReplica extends SolrCloudTestCase {
     JettySolrRunner pullReplicaJetty = cluster.getReplicaJetty(docCollection.getSlice("shard1").getReplicas(EnumSet.of(Replica.Type.TLOG)).get(0));
     pullReplicaJetty.stop();
     waitForState("Replica not removed", collectionName, activeReplicaCount(0, 1, 0));
+    waitForLeaderChange(pullReplicaJetty, "shard1");
 //    // Also wait for the replica to be placed in state="down"
 //    waitForState("Didn't update state", collectionName, clusterStateReflectsActiveAndDownReplicas());
 
@@ -461,7 +464,6 @@ public class TestTlogReplica extends SolrCloudTestCase {
   }
 
   @Test
-  // Removed BadApple on 2018-05-21
   public void testOnlyLeaderIndexes() throws Exception {
     createAndWaitForCollection(1, 0, 2, 0);
 
@@ -525,7 +527,7 @@ public class TestTlogReplica extends SolrCloudTestCase {
       }
     }
     checkRTG(120,150, cluster.getJettySolrRunners());
-    waitForReplicasCatchUp(20);
+    waitForReplicasCatchUp(4 * REPLICATION_TIMEOUT_SECS);
   }
 
   @SuppressWarnings("unchecked")
@@ -557,7 +559,6 @@ public class TestTlogReplica extends SolrCloudTestCase {
     for (int i = 0; i < 3; i++) {
       UpdateRequest ureq = new UpdateRequest().add(sdoc("id", "7"));
       ureq.setParam("collection", collectionName);
-      ureq.setParam(UpdateRequest.MIN_REPFACT, "2");
       NamedList<Object> response = cloudClient.request(ureq);
       if ((Integer)((NamedList<Object>)response.get("responseHeader")).get(UpdateRequest.REPFACT) >= 2) {
         break;
@@ -595,7 +596,6 @@ public class TestTlogReplica extends SolrCloudTestCase {
     for (int i = 0; i < 3; i++) {
       UpdateRequest ureq = new UpdateRequest().add(sdoc("id", "8"));
       ureq.setParam("collection", collectionName);
-      ureq.setParam(UpdateRequest.MIN_REPFACT, "2");
       NamedList<Object> response = cloudClient.request(ureq);
       if ((Integer)((NamedList<Object>)response.get("responseHeader")).get(UpdateRequest.REPFACT) >= 2) {
         break;
@@ -658,7 +658,11 @@ public class TestTlogReplica extends SolrCloudTestCase {
     JettySolrRunner oldLeaderJetty = getSolrRunner(true).get(0);
     oldLeaderJetty.stop();
     waitForState("Replica not removed", collectionName, activeReplicaCount(0, 1, 0));
-    new UpdateRequest()
+
+    // Even after the replica is gone, a leader may not be elected yet. Wait for it.
+    waitForLeaderChange(oldLeaderJetty, "shard1");
+
+    new UpdateRequest()
         .add(sdoc("id", "3"))
         .add(sdoc("id", "4"))
         .process(cloudClient, collectionName);
@@ -670,6 +674,18 @@ public class TestTlogReplica extends SolrCloudTestCase {
     waitForNumDocsInAllActiveReplicas(4, 0);
   }
 
+  private void waitForLeaderChange(JettySolrRunner oldLeaderJetty, String shardName) {
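+    // a null oldLeaderJetty only requires an active leader; otherwise the leader must also have moved off the stopped node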
+    waitForState("Expect new leader", collectionName,
+        (liveNodes, collectionState) -> {
+          Replica leader = collectionState.getLeader(shardName);
+          if (leader == null || !leader.isActive(cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())) {
+            return false;
+          }
+          return oldLeaderJetty == null || !leader.getNodeName().equals(oldLeaderJetty.getNodeName());
+        }
+    );
+  }
+
   public void testOutOfOrderDBQWithInPlaceUpdates() throws Exception {
     createAndWaitForCollection(1,0,2,0);
     assertFalse(getSolrCore(true).get(0).getLatestSchema().getField("inplace_updatable_int").indexed());
@@ -687,16 +703,9 @@ public class TestTlogReplica extends SolrCloudTestCase {
       }
     }
     JettySolrRunner oldLeaderJetty = getSolrRunner(true).get(0);
-    String oldLeaderNodeName = oldLeaderJetty.getNodeName();
     oldLeaderJetty.stop();
     waitForState("Replica not removed", collectionName, activeReplicaCount(0, 1, 0));
-    waitForState("Expect new leader", collectionName,
-        (liveNodes, collectionState) -> {
-          Replica leader = collectionState.getLeader("shard1");
-          if (leader == null) return false;
-          return !leader.getNodeName().equals(oldLeaderNodeName);
-        }
-    );
+    waitForLeaderChange(oldLeaderJetty, "shard1");
     oldLeaderJetty.start();
     waitForState("Replica not added", collectionName, activeReplicaCount(0, 2, 0));
     checkRTG(1,1, cluster.getJettySolrRunners());
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java
index 68898fb..cd92ce6 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java
@@ -70,7 +70,7 @@ public class AutoAddReplicasIntegrationTest extends SolrCloudTestCase {
 
     new V2Request.Builder("/cluster")
         .withMethod(SolrRequest.METHOD.POST)
-        .withPayload("{set-obj-property:{defaults : {cluster: {useLegacyReplicaAssignment:true}}}}}")
+        .withPayload("{set-obj-property:{defaults : {cluster: {useLegacyReplicaAssignment:true}}}}")
         .build()
         .process(cluster.getSolrClient());
   }
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java
index c1b8513..b6e6d20 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java
@@ -65,7 +65,7 @@ public class AutoAddReplicasPlanActionTest extends SolrCloudTestCase{
 
     new V2Request.Builder("/cluster")
         .withMethod(SolrRequest.METHOD.POST)
-        .withPayload("{set-obj-property:{defaults : {cluster: {useLegacyReplicaAssignment:true}}}}}")
+        .withPayload("{set-obj-property:{defaults : {cluster: {useLegacyReplicaAssignment:true}}}}")
         .build()
         .process(cluster.getSolrClient());
   }
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimScenario.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimScenario.java
new file mode 100644
index 0000000..24f7b86
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimScenario.java
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud.autoscaling.sim;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.PrintStream;
+import java.nio.charset.Charset;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.solr.client.solrj.cloud.autoscaling.Suggester;
+import org.apache.solr.cloud.CloudUtil;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.util.LogLevel;
+import org.junit.Test;
+
+/**
+ *
+ */
+@LogLevel("org.apache.solr.cloud.autoscaling=DEBUG")
+public class TestSimScenario extends SimSolrCloudTestCase {
+
+  // simple scenario to test .autoAddReplicas trigger
+  String autoAddReplicasScenario =
+      "# standard comment\n" +
+      "// java comment\n" +
+      "create_cluster numNodes=2 // inline comment\n" +
+      "load_autoscaling json={'cluster-policy'+:+[{'replica'+:+'<3',+'shard'+:+'#EACH',+'collection'+:+'testCollection','node':'#ANY'}]}&defaultWaitFor=10\n" +
+      "solr_request /admin/collections?action=CREATE&autoAddReplicas=true&name=testCollection&numShards=2&replicationFactor=2&maxShardsPerNode=2\n" +
+      "wait_collection collection=testCollection&shards=2&replicas=2\n" +
+      "event_listener trigger=.auto_add_replicas&stage=SUCCEEDED\n" +
+      "kill_nodes node=${_random_node_}\n" +
+      "wait_event trigger=.auto_add_replicas&wait=60\n" +
+      "wait_collection collection=testCollection&shards=2&replicas=2\n" +
+      "save_snapshot path=${snapshotPath}\n";
+
+  @Test
+  public void testAutoAddReplicas() throws Exception {
+    String snapshotPath = createTempDir() + "/snapshot";
+    try (SimScenario scenario = SimScenario.load(autoAddReplicasScenario)) {
+      scenario.context.put("snapshotPath", snapshotPath);
+      scenario.run();
+    }
+    SnapshotCloudManager snapshotCloudManager = SnapshotCloudManager.readSnapshot(new File(snapshotPath));
+    CloudUtil.waitForState(snapshotCloudManager, "testCollection", 1, TimeUnit.SECONDS,
+        CloudUtil.clusterShape(2, 2));
+  }
+
+  String testSuggestionsScenario =
+      "create_cluster numNodes=2\n" +
+      "solr_request /admin/collections?action=CREATE&autoAddReplicas=true&name=testCollection&numShards=2&replicationFactor=2&maxShardsPerNode=2\n" +
+      "wait_collection collection=testCollection&shards=2&replicas=2\n" +
+      "ctx_set key=myNode&value=${_random_node_}\n" +
+      "solr_request /admin/collections?action=ADDREPLICA&collection=testCollection&shard=shard1&node=${myNode}\n" +
+      "solr_request /admin/collections?action=ADDREPLICA&collection=testCollection&shard=shard1&node=${myNode}\n" +
+      "loop_start iterations=${iterative}\n" +
+      "  calculate_suggestions\n" +
+      "  apply_suggestions\n" +
+      "  solr_request /admin/collections?action=ADDREPLICA&collection=testCollection&shard=shard1&node=${myNode}\n" +
+      "  solr_request /admin/collections?action=ADDREPLICA&collection=testCollection&shard=shard1&node=${myNode}\n" +
+      "loop_end\n" +
+      "loop_start iterations=${justCalc}\n" +
+      "  calculate_suggestions\n" +
+      "loop_end\n" +
+      "dump redact=true";
+
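+  // ${iterative} and ${justCalc} are placeholders resolved from the scenario context that each test sets before run()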
+  @Test
+  public void testSuggestions() throws Exception {
+    try (SimScenario scenario = SimScenario.load(testSuggestionsScenario)) {
+      ByteArrayOutputStream baos = new ByteArrayOutputStream();
+      PrintStream ps = new PrintStream(baos, true, Charset.forName("UTF-8"));
+      scenario.console = ps;
+      scenario.context.put("iterative", "0");
+      scenario.context.put("justCalc", "1");
+      scenario.run();
+      List<Suggester.SuggestionInfo> suggestions = (List<Suggester.SuggestionInfo>)scenario.context.get(SimScenario.SUGGESTIONS_CTX_PROP);
+      assertNotNull(suggestions);
+      assertEquals(suggestions.toString(), 1, suggestions.size());
+      // reconstruct the snapshot from the dump
+      Map<String, Object> snapshot = (Map<String, Object>)Utils.fromJSON(baos.toByteArray());
+      Map<String, Object> autoscalingState = (Map<String, Object>)snapshot.get(SnapshotCloudManager.AUTOSCALING_STATE_KEY);
+      assertNotNull(autoscalingState);
+      assertEquals(autoscalingState.toString(), 1, autoscalingState.size());
+      assertTrue(autoscalingState.toString(), autoscalingState.containsKey("suggestions"));
+      List<Map<String, Object>> snapSuggestions = (List<Map<String, Object>>)autoscalingState.get("suggestions");
+      assertEquals(snapSuggestions.toString(), 1, snapSuggestions.size());
+      // _loop_iter_ should be present and 0 (first iteration)
+      assertEquals(0, scenario.context.get(SimScenario.LOOP_ITER_PROP));
+    }
+    // try looping more times
+    try (SimScenario scenario = SimScenario.load(testSuggestionsScenario)) {
+      scenario.context.put("iterative", "10");
+      scenario.context.put("justCalc", "0");
+      scenario.run();
+      assertEquals(9, scenario.context.get(SimScenario.LOOP_ITER_PROP));
+    }
+
+  }
+
+  String indexingScenario =
+      "create_cluster numNodes=100\n" +
+      "solr_request /admin/collections?action=CREATE&autoAddReplicas=true&name=testCollection&numShards=2&replicationFactor=2&maxShardsPerNode=2\n" +
+      "wait_collection collection=testCollection&shards=2&replicas=2\n" +
+      "solr_request /admin/autoscaling?httpMethod=POST&stream.body=" +
+          "{'set-trigger':{'name':'indexSizeTrigger','event':'indexSize','waitFor':'10s','aboveDocs':1000,'enabled':true,"+
+          "'actions':[{'name':'compute_plan','class':'solr.ComputePlanAction'},{'name':'execute_plan','class':'solr.ExecutePlanAction'}]}}\n" +
+      "event_listener trigger=indexSizeTrigger&stage=SUCCEEDED\n" +
+      "index_docs collection=testCollection&numDocs=3000\n" +
+      "run\n" +
+      "wait_event trigger=indexSizeTrigger&wait=60\n" +
+      "assert condition=not_null&key=_trigger_event_indexSizeTrigger\n" +
+      "assert condition=equals&key=_trigger_event_indexSizeTrigger/eventType&expected=INDEXSIZE\n" +
+      "assert condition=equals&key=_trigger_event_indexSizeTrigger/properties/requestedOps[0]/action&expected=SPLITSHARD\n" +
+      "wait_collection collection=testCollection&shards=6&withInactive=true&requireLeaders=false&replicas=2";
+
+  @Test
+  public void testIndexing() throws Exception {
+    try (SimScenario scenario = SimScenario.load(indexingScenario)) {
+      scenario.run();
+    }
+  }
+}
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java
index b1bd7f4..50fa634 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java
@@ -215,10 +215,16 @@ public class TestSnapshotCloudManager extends SolrCloudTestCase {
 
   // ignore these because SimCloudManager always modifies them
   private static final Set<Pattern> IGNORE_DISTRIB_STATE_PATTERNS = new HashSet<>(Arrays.asList(
-      Pattern.compile("/autoscaling/triggerState.*"),
-      Pattern.compile("/clusterstate\\.json"), // different format in SimClusterStateProvider
+      Pattern.compile("/autoscaling/triggerState/.*"),
+      // some triggers may have run after the snapshot was taken
+      Pattern.compile("/autoscaling/events/.*"),
+      // we always use format 1 in SimClusterStateProvider
+      Pattern.compile("/clusterstate\\.json"),
+      // depending on the startup sequence leaders may differ
       Pattern.compile("/collections/[^/]+?/leader_elect/.*"),
       Pattern.compile("/collections/[^/]+?/leaders/.*"),
+      Pattern.compile("/collections/[^/]+?/terms/.*"),
+      Pattern.compile("/overseer_elect/election/.*"),
       Pattern.compile("/live_nodes/.*")
   ));
 
diff --git a/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
index b2794df..632fcb6 100644
--- a/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
@@ -42,6 +42,7 @@ import org.apache.solr.core.DirectoryFactory.DirContext;
 import org.apache.solr.handler.SnapShooter;
 import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.store.hdfs.HdfsLocalityReporter;
 import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.apache.solr.util.MockCoreContainer.MockCoreDescriptor;
@@ -193,7 +194,7 @@ public class HdfsDirectoryFactoryTest extends SolrTestCaseJ4 {
       props.put(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_ENABLE, "false");
       props.put(HdfsDirectoryFactory.LOCALITYMETRICS_ENABLED, "true");
       factory.init(new NamedList<>(props));
-      factory.initializeMetrics(metricManager, registry, "foo", scope);
+      factory.initializeMetrics(new SolrMetricsContext(metricManager, registry, "foo"), scope);
 
       // get the metrics map for the locality bean
       MetricsMap metrics = (MetricsMap) ((SolrMetricManager.GaugeWrapper) metricManager.registry(registry).getMetrics().get("OTHER." + scope + ".hdfsLocality")).getGauge();
diff --git a/solr/core/src/test/org/apache/solr/core/MockInfoBean.java b/solr/core/src/test/org/apache/solr/core/MockInfoBean.java
index 806c6b8..3ea3ace 100644
--- a/solr/core/src/test/org/apache/solr/core/MockInfoBean.java
+++ b/solr/core/src/test/org/apache/solr/core/MockInfoBean.java
@@ -19,16 +19,13 @@ package org.apache.solr.core;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 
-import com.codahale.metrics.MetricRegistry;
 import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 
 class MockInfoBean implements SolrInfoBean, SolrMetricProducer {
   Set<String> metricNames = ConcurrentHashMap.newKeySet();
-  MetricRegistry registry;
-  SolrMetricManager metricManager;
-  String registryName;
+  SolrMetricsContext solrMetricsContext;
 
   @Override
   public String getName() {
@@ -51,15 +48,13 @@ class MockInfoBean implements SolrInfoBean, SolrMetricProducer {
   }
 
   @Override
-  public MetricRegistry getMetricRegistry() {
-    return registry;
+  public SolrMetricsContext getSolrMetricsContext() {
+    return solrMetricsContext;
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    this.metricManager = manager;
-    this.registryName = registryName;
-    registry = manager.registry(registryName);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    solrMetricsContext = parentContext.getChildContext(this);
     MetricsMap metricsMap = new MetricsMap((detailed, map) -> {
       map.put("Integer", 123);
       map.put("Double",567.534);
@@ -70,6 +65,6 @@ class MockInfoBean implements SolrInfoBean, SolrMetricProducer {
       map.put("String","testing");
       map.put("Object", new Object());
     });
-    manager.registerGauge(this, registryName, metricsMap, tag, true, getClass().getSimpleName(), getCategory().toString(), scope);
+    solrMetricsContext.gauge(this, metricsMap, true, getClass().getSimpleName(), getCategory().toString(), scope);
   }
 }
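
The pattern behind this refactor: a SolrMetricProducer now receives a
SolrMetricsContext, derives a child context for itself, and registers metrics
through that context instead of calling SolrMetricManager with a separate
registry name and tag. A minimal sketch under the new API (MyProducer and the
gauge name are made up; the null info-bean argument is an assumption that
mirrors the counter(null, ...) call in SolrMetricTestUtils further down in
this patch):

    // sketch of a producer under the new SolrMetricsContext API
    class MyProducer implements SolrMetricProducer {
      private SolrMetricsContext solrMetricsContext;

      @Override
      public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
        // the child context ties the registered metrics' lifecycle to this producer
        solrMetricsContext = parentContext.getChildContext(this);
        solrMetricsContext.gauge(null, () -> 42, true, "answer", "OTHER", scope);
      }

      @Override
      public SolrMetricsContext getSolrMetricsContext() {
        return solrMetricsContext;
      }
    }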
diff --git a/solr/core/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java b/solr/core/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java
index 0b0c7e5..ad62c40 100644
--- a/solr/core/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java
+++ b/solr/core/src/test/org/apache/solr/core/MockQuerySenderListenerReqHandler.java
@@ -17,7 +17,7 @@
 package org.apache.solr.core;
 
 import org.apache.solr.handler.RequestHandlerBase;
-import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.common.util.NamedList;
@@ -42,9 +42,9 @@ public class MockQuerySenderListenerReqHandler extends RequestHandlerBase {
   }
 
   @Override
-  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
-    super.initializeMetrics(manager, registryName, tag, scope);
-    manager.registerGauge(this, registryName, () -> initCounter.intValue(), tag, true, "initCount", getCategory().toString(), scope);
+  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+    super.initializeMetrics(parentContext, scope);
+    solrMetricsContext.gauge(this, () -> initCounter.intValue(), true, "initCount", getCategory().toString(), scope);
   }
 
   @Override
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
index 17494e0..178ca2a 100644
--- a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
+++ b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
@@ -139,7 +139,7 @@ public class TestSolrConfigHandler extends RestTestBase {
     assertEquals("10", m._getStr("config/updateHandler/autoCommit/maxTime",null));
     assertEquals("true", m._getStr("config/requestDispatcher/requestParsers/addHttpRequestToContext",null));
     payload = "{\n" +
-        " 'unset-property' :  'updateHandler.autoCommit.maxDocs'} \n" +
+        " 'unset-property' :  'updateHandler.autoCommit.maxDocs' \n" +
         " }";
     runConfigCommand(harness, "/config", payload);
 
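
The fix here: the old payload had a stray closing brace right after the property
name, so the command was not one well-formed JSON object. Written out, the
corrected command is (a restatement of the test's payload, nothing new):

    // unset-property now parses as a single object
    String payload = "{\n" +
        " 'unset-property' :  'updateHandler.autoCommit.maxDocs' \n" +
        " }";
    runConfigCommand(harness, "/config", payload);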
diff --git a/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java b/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java
index 9ca5e1b..bd4b6bd 100644
--- a/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java
+++ b/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java
@@ -17,6 +17,9 @@
 
 package org.apache.solr.filestore;
 
+import static org.apache.solr.common.util.Utils.JAVABINCONSUMER;
+import static org.apache.solr.core.TestDynamicLoading.getFileContent;
+
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -27,7 +30,6 @@ import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.function.Predicate;
 
-import com.google.common.collect.ImmutableSet;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
@@ -48,8 +50,7 @@ import org.apache.solr.util.LogLevel;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.server.ByteBufferInputStream;
 
-import static org.apache.solr.common.util.Utils.JAVABINCONSUMER;
-import static org.apache.solr.core.TestDynamicLoading.getFileContent;
+import com.google.common.collect.ImmutableSet;
 
 @LogLevel("org.apache.solr.filestore.PackageStoreAPI=DEBUG;org.apache.solr.filestore.DistribPackageStore=DEBUG")
 public class TestDistribPackageStore extends SolrCloudTestCase {
diff --git a/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java b/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java
index 7065b0d..da990da 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java
@@ -297,5 +297,15 @@ public class TestReqParamsAPI extends SolrCloudTestCase {
         asList("response", "params", "y", "p"),
         null,
         10);
+
+    payload = " {'unset' : 'y'}";
+    TestSolrConfigHandler.runConfigCommandExpectFailure(
+        writeHarness,"/config/params", payload, "Unknown operation 'unset'");
+
+    // deleting an already-deleted param set should fail;
+    // the error message should name the parameter set
+    payload = " {'delete' : 'y'}";
+    TestSolrConfigHandler.runConfigCommandExpectFailure(
+        writeHarness,"/config/params", payload, "Could not delete. No such params 'y' exist");
   }
 }
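
For contrast with the two failure cases added above, a successful delete of an
existing param set would look like this (a sketch using the same test helper;
'x' is a hypothetical param set created earlier):

    // sketch: deleting a param set that exists succeeds
    payload = " {'delete' : 'x'}";
    TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params", payload);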
diff --git a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java
index 08af0a5..5b6c4f1 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java
@@ -262,6 +262,15 @@ public class TestSolrConfigHandlerCloud extends AbstractFullDistribZkTestBase {
         null,
         TIMEOUT_S);
 
+    payload = " {'unset' : 'y'}";
+    TestSolrConfigHandler.runConfigCommandExpectFailure(
+        writeHarness,"/config/params", payload, "Unknown operation 'unset'");
+
+    // deleting an already-deleted param set should fail;
+    // the error message should name the parameter set
+    payload = " {'delete' : 'y'}";
+    TestSolrConfigHandler.runConfigCommandExpectFailure(
+        writeHarness,"/config/params", payload, "Could not delete. No such params 'y' exist");
 
   }
 
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
index a6dbd9e..534b789 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
@@ -17,6 +17,7 @@
 
 package org.apache.solr.handler.admin;
 
+import java.util.Arrays;
 import java.util.Map;
 
 import com.codahale.metrics.Counter;
@@ -24,6 +25,15 @@ import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.PluginBag;
+import org.apache.solr.core.PluginInfo;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.handler.RequestHandlerBase;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricsContext;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.response.SolrQueryResponse;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -53,12 +63,12 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
   @AfterClass
   public static void cleanupMetrics() throws Exception {
     if (null != h) {
-      h.getCoreContainer().getMetricManager().registry("solr.jvm"  ).remove("solrtest_foo");
+      h.getCoreContainer().getMetricManager().registry("solr.jvm").remove("solrtest_foo");
       h.getCoreContainer().getMetricManager().registry("solr.jetty").remove("solrtest_foo");
       h.getCoreContainer().getMetricManager().registry("solr.jetty").remove("solrtest_foo:bar");
     }
   }
-  
+
   @Test
   public void test() throws Exception {
     MetricsHandler handler = new MetricsHandler(h.getCoreContainer());
@@ -145,7 +155,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
     assertEquals(1, values.size());
-    assertEquals(13, ((NamedList)values.get("solr.node")).size());
+    assertEquals(13, ((NamedList) values.get("solr.node")).size());
     assertNotNull(values.get("solr.node"));
     values = (NamedList) values.get("solr.node");
     assertNotNull(values.get("CONTAINER.cores.lazy")); // this is a gauge node
@@ -171,7 +181,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertNotNull(values.get("solr.core.collection1"));
     values = (NamedList) values.get("solr.core.collection1");
     assertEquals(1, values.size());
-    Map m = (Map)values.get("CACHE.core.fieldCache");
+    Map m = (Map) values.get("CACHE.core.fieldCache");
     assertNotNull(m);
     assertNotNull(m.get("entries_count"));
 
@@ -223,7 +233,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertTrue(nl.size() > 0);
     nl.forEach((k, v) -> {
       assertTrue(v instanceof Map);
-      Map map = (Map)v;
+      Map map = (Map) v;
       assertTrue(map.size() > 2);
     });
 
@@ -238,7 +248,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertTrue(nl.size() > 0);
     nl.forEach((k, v) -> {
       assertTrue(v instanceof Map);
-      Map map = (Map)v;
+      Map map = (Map) v;
       assertEquals(2, map.size());
       assertNotNull(map.get("inserts"));
       assertNotNull(map.get("size"));
@@ -257,7 +267,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     Object val = values.findRecursive("metrics", key1);
     assertNotNull(val);
     assertTrue(val instanceof Map);
-    assertTrue(((Map)val).size() >= 2);
+    assertTrue(((Map) val).size() >= 2);
 
     String key2 = "solr.core.collection1:CACHE.core.fieldCache:entries_count";
     resp = new SolrQueryResponse();
@@ -276,7 +286,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     val = values.findRecursive("metrics", key3);
     assertNotNull(val);
     assertTrue(val instanceof Number);
-    assertEquals(3, ((Number)val).intValue());
+    assertEquals(3, ((Number) val).intValue());
 
     // test multiple keys
     resp = new SolrQueryResponse();
@@ -306,7 +316,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
         MetricsHandler.KEY_PARAM, "foo", MetricsHandler.KEY_PARAM, "foo:bar:baz:xyz"), resp);
     values = resp.getValues();
-    NamedList metrics = (NamedList)values.get("metrics");
+    NamedList metrics = (NamedList) values.get("metrics");
     assertEquals(0, metrics.size());
     assertNotNull(values.findRecursive("errors", "foo"));
     assertNotNull(values.findRecursive("errors", "foo:bar:baz:xyz"));
@@ -316,7 +326,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
         MetricsHandler.KEY_PARAM, "foo:bar:baz"), resp);
     values = resp.getValues();
-    metrics = (NamedList)values.get("metrics");
+    metrics = (NamedList) values.get("metrics");
     assertEquals(0, metrics.size());
     assertNotNull(values.findRecursive("errors", "foo:bar:baz"));
 
@@ -325,8 +335,122 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
         MetricsHandler.KEY_PARAM, "solr.jetty:unknown:baz"), resp);
     values = resp.getValues();
-    metrics = (NamedList)values.get("metrics");
+    metrics = (NamedList) values.get("metrics");
     assertEquals(0, metrics.size());
     assertNotNull(values.findRecursive("errors", "solr.jetty:unknown:baz"));
   }
+
+  @Test
+  public void testMetricsUnload() throws Exception {
+
+    SolrCore core = h.getCoreContainer().getCore("collection1");
+    RefreshablePluginHolder pluginHolder = null;
+    try {
+      PluginInfo info = new PluginInfo(SolrRequestHandler.TYPE, Utils.makeMap("name", "/dumphandler", "class", DumpRequestHandler.class.getName()));
+      DumpRequestHandler requestHandler = new DumpRequestHandler();
+      requestHandler.gaugevals = Utils.makeMap("d_k1", "v1", "d_k2", "v2");
+      pluginHolder = new RefreshablePluginHolder(info, requestHandler);
+      core.getRequestHandlers().put("/dumphandler", pluginHolder);
+    } finally {
+      core.close();
+    }
+
+    MetricsHandler handler = new MetricsHandler(h.getCoreContainer());
+
+    SolrQueryResponse resp = new SolrQueryResponse();
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", MetricsHandler.COMPACT_PARAM, "true", "key", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge"),
+        resp);
+
+    assertEquals("v1", resp.getValues()._getStr(Arrays.asList("metrics", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge","d_k1"), null));
+    assertEquals("v2", resp.getValues()._getStr(Arrays.asList("metrics","solr.core.collection1:QUERY./dumphandler.dumphandlergauge","d_k2"), null));
+    pluginHolder.closeHandler();
+    resp = new SolrQueryResponse();
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", MetricsHandler.COMPACT_PARAM, "true", "key", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge"),
+        resp);
+
+    assertEquals(null, resp.getValues()._getStr(Arrays.asList("metrics", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge","d_k1"), null));
+    assertEquals(null, resp.getValues()._getStr(Arrays.asList("metrics","solr.core.collection1:QUERY./dumphandler.dumphandlergauge","d_k2"), null));
+
+    DumpRequestHandler requestHandler = new DumpRequestHandler();
+    requestHandler.gaugevals =  Utils.makeMap("d_k1","v1.1", "d_k2","v2.1");
+    pluginHolder.reset(requestHandler);
+    resp = new SolrQueryResponse();
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", MetricsHandler.COMPACT_PARAM, "true", "key", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge"),
+        resp);
+
+    assertEquals("v1.1", resp.getValues()._getStr(Arrays.asList("metrics", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge","d_k1"), null));
+    assertEquals("v2.1", resp.getValues()._getStr(Arrays.asList("metrics","solr.core.collection1:QUERY./dumphandler.dumphandlergauge","d_k2"), null));
+
+    handler.close();
+  }
+
+  static class RefreshablePluginHolder extends PluginBag.PluginHolder<SolrRequestHandler> {
+
+    private DumpRequestHandler rh;
+    private SolrMetricsContext metricsInfo;
+
+    public RefreshablePluginHolder(PluginInfo info, DumpRequestHandler rh) {
+      super(info);
+      this.rh = rh;
+    }
+
+    @Override
+    public boolean isLoaded() {
+      return true;
+    }
+
+    void closeHandler() throws Exception {
+      this.metricsInfo = rh.getSolrMetricsContext();
+//      if(metricsInfo.tag.contains(String.valueOf(rh.hashCode()))){
+//        //this created a new child metrics
+//        metricsInfo = metricsInfo.getParent();
+//      }
+      this.rh.close();
+    }
+
+    void reset(DumpRequestHandler rh) throws Exception {
+      this.rh = rh;
+      if (metricsInfo != null) {
+        this.rh.initializeMetrics(metricsInfo, "/dumphandler");
+      }
+    }
+
+    @Override
+    public SolrRequestHandler get() {
+      return rh;
+    }
+  }
+
+  public static class DumpRequestHandler extends RequestHandlerBase {
+
+    static String key = DumpRequestHandler.class.getName();
+    Map<String, Object> gaugevals;
+
+    @Override
+    public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
+      rsp.add("key", key);
+    }
+
+    @Override
+    public String getDescription() {
+      return "DO nothing";
+    }
+
+    @Override
+    public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
+      super.initializeMetrics(parentContext, scope);
+      MetricsMap metrics = new MetricsMap((detailed, map) -> map.putAll(gaugevals));
+      solrMetricsContext.gauge(this, metrics, true, "dumphandlergauge", getCategory().toString(), scope);
+    }
+
+    @Override
+    public Boolean registerV2() {
+      return Boolean.FALSE;
+    }
+  }
 }
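
A note on the 'key' parameter exercised throughout this test: a key is a
colon-separated registry:metricName[:property] reference, which is why
"foo:bar:baz:xyz" above ends up in the errors section. A sketch of a
single-metric lookup, mirroring the requests in the test:

    // sketch: fetch one property of one metric from one registry
    SolrQueryResponse rsp = new SolrQueryResponse();
    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
        MetricsHandler.KEY_PARAM, "solr.core.collection1:CACHE.core.fieldCache:entries_count"), rsp);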
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHistoryHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHistoryHandlerTest.java
index 4c058e5..b945803 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHistoryHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHistoryHandlerTest.java
@@ -34,6 +34,7 @@ import org.apache.solr.common.util.Pair;
 import org.apache.solr.common.util.TimeSource;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.util.LogLevel;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -73,9 +74,10 @@ public class MetricsHistoryHandlerTest extends SolrCloudTestCase {
       solrClient = ((SimCloudManager)cloudManager).simGetSolrClient();
       // need to register the factory here, before we start the real cluster
       metricsHandler = new MetricsHandler(metricManager);
+      SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, SolrInfoBean.Group.node.toString(), "");
       handler = new MetricsHistoryHandler(cloudManager.getClusterStateProvider().getLiveNodes().iterator().next(),
           metricsHandler, solrClient, cloudManager, args);
-      handler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), "", CommonParams.METRICS_HISTORY_PATH);
+      handler.initializeMetrics(solrMetricsContext, CommonParams.METRICS_HISTORY_PATH);
     }
     configureCluster(1)
         .addConfig("conf", configset("cloud-minimal"))
@@ -87,7 +89,8 @@ public class MetricsHistoryHandlerTest extends SolrCloudTestCase {
       solrClient = cluster.getSolrClient();
       metricsHandler = new MetricsHandler(metricManager);
       handler = new MetricsHistoryHandler(cluster.getJettySolrRunner(0).getNodeName(), metricsHandler, solrClient, cloudManager, args);
-      handler.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), "", CommonParams.METRICS_HISTORY_PATH);
+      SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, SolrInfoBean.Group.node.toString(), "");
+      handler.initializeMetrics(solrMetricsContext, CommonParams.METRICS_HISTORY_PATH);
       SPEED = 1;
     }
     timeSource = cloudManager.getTimeSource();
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedExpandComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedExpandComponentTest.java
index 7a4d7ce..4662a81 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedExpandComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedExpandComponentTest.java
@@ -29,9 +29,9 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 /**
- * Test for QueryComponent's distributed querying
+ * Test for distributed ExpandComponent
  *
- * @see org.apache.solr.handler.component.QueryComponent
+ * @see org.apache.solr.handler.component.ExpandComponent
  */
 public class DistributedExpandComponentTest extends BaseDistributedSearchTestCase {
 
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java
index 9dc9050..ceb91f0 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java
@@ -47,17 +47,24 @@ public class DistributedTermsComponentTest extends BaseDistributedSearchTestCase
 
   @Test
   public void test() throws Exception {
-
     Random random = random();
     del("*:*");
+
+    index(id, random.nextInt(), "b_t", "snake a,b spider shark snail slug seal", "foo_i_p", "1");
+    query("qt", "/terms", "shards.qt", "/terms", "terms", "true", "terms.fl", "foo_i_p");
+    del("*:*");
+
+    // verify point field on empty index
+    query("qt", "/terms", "shards.qt", "/terms", "terms", "true", "terms.fl", "foo_i_p");
+
     index(id, random.nextInt(), "b_t", "snake a,b spider shark snail slug seal", "foo_i", "1");
-    index(id, random.nextInt(), "b_t", "snake spider shark snail slug", "foo_i", "2");
+    index(id, random.nextInt(), "b_t", "snake spider shark snail slug", "foo_i", "2", "foo_date_p", "2015-01-03T14:30:00Z");
     index(id, random.nextInt(), "b_t", "snake spider shark snail", "foo_i", "3");
-    index(id, random.nextInt(), "b_t", "snake spider shark", "foo_i", "2");
-    index(id, random.nextInt(), "b_t", "snake spider", "c_t", "snake spider");
-    index(id, random.nextInt(), "b_t", "snake", "c_t", "snake");
-    index(id, random.nextInt(), "b_t", "ant zebra", "c_t", "ant zebra");
-    index(id, random.nextInt(), "b_t", "zebra", "c_t", "zebra");
+    index(id, random.nextInt(), "b_t", "snake spider shark", "foo_i", "2", "foo_date_p", "2014-03-15T12:00:00Z");
+    index(id, random.nextInt(), "b_t", "snake spider", "c_t", "snake spider", "foo_date_p", "2014-03-15T12:00:00Z");
+    index(id, random.nextInt(), "b_t", "snake", "c_t", "snake", "foo_date_p", "2014-03-15T12:00:00Z");
+    index(id, random.nextInt(), "b_t", "ant zebra", "c_t", "ant zebra", "foo_date_p", "2015-01-03T14:30:00Z");
+    index(id, random.nextInt(), "b_t", "zebra", "c_t", "zebra", "foo_date_p", "2015-01-03T14:30:00Z");
     commit();
 
     handle.clear();
@@ -77,6 +84,11 @@ public class DistributedTermsComponentTest extends BaseDistributedSearchTestCase
     query("qt", "/terms", "shards.qt", "/terms", "terms", "true", "terms.fl", "foo_i", "terms.stats", "true","terms.list", "2,3,1");
     query("qt", "/terms", "shards.qt", "/terms", "terms", "true", "terms.fl", "b_t", "terms.list", "snake,zebra", "terms.ttf", "true");
     query("qt", "/terms", "shards.qt", "/terms", "terms", "true", "terms.fl", "b_t", "terms.fl", "c_t", "terms.list", "snake,ant,zebra", "terms.ttf", "true");
+
+    // for date point field
+    query("qt", "/terms", "shards.qt", "/terms", "terms", "true", "terms.fl", "foo_date_p");
+    // terms.ttf=true doesn't work for point fields
+    //query("qt", "/terms", "shards.qt", "/terms", "terms", "true", "terms.fl", "foo_date_p", "terms.ttf", "true");
   }
   
   protected QueryResponse query(Object... q) throws Exception {
diff --git a/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java
index b50f31f..d5567ac 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java
@@ -29,6 +29,7 @@ import org.apache.solr.common.params.TermsParams;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.PointMerger;
+import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -39,40 +40,47 @@ public class TermsComponentTest extends SolrTestCaseJ4 {
   public static void beforeTest() throws Exception {
     System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_
     initCore("solrconfig.xml", "schema12.xml");
+  }
+
+  @Before
+  public void createIndex() {
+    // cleanup first
+    assertU(delQ("*:*"));
+    assertU(commit());
+
+    assertU(adoc("id", "0", "lowerfilt", "a", "standardfilt", "a", "foo_i", "1"));
+    assertU(adoc("id", "1", "lowerfilt", "a", "standardfilt", "aa", "foo_i","1"));
+    assertU(adoc("id", "2", "lowerfilt", "aa", "standardfilt", "aaa", "foo_i","2"));
+    assertU(adoc("id", "3", "lowerfilt", "aaa", "standardfilt", "abbb"));
+    assertU(adoc("id", "4", "lowerfilt", "ab", "standardfilt", "b"));
+    assertU(adoc("id", "5", "lowerfilt", "abb", "standardfilt", "bb"));
+    assertU(adoc("id", "6", "lowerfilt", "abc", "standardfilt", "bbbb"));
+    assertU(adoc("id", "7", "lowerfilt", "b", "standardfilt", "c"));
+    assertU(adoc("id", "8", "lowerfilt", "baa", "standardfilt", "cccc"));
+    assertU(adoc("id", "9", "lowerfilt", "bbb", "standardfilt", "ccccc"));
+
+
+    assertU(adoc("id", "10", "standardfilt", "ddddd"));
+    assertU(commit());
+
+    assertU(adoc("id", "11", "standardfilt", "ddddd"));
+    assertU(adoc("id", "12", "standardfilt", "ddddd"));
+    assertU(adoc("id", "13", "standardfilt", "ddddd"));
+    assertU(adoc("id", "14", "standardfilt", "d"));
+    assertU(adoc("id", "15", "standardfilt", "d"));
+    assertU(adoc("id", "16", "standardfilt", "d"));
+
+    assertU(commit());
+
+    assertU(adoc("id", "17", "standardfilt", "snake"));
+    assertU(adoc("id", "18", "standardfilt", "spider"));
+    assertU(adoc("id", "19", "standardfilt", "shark"));
+    assertU(adoc("id", "20", "standardfilt", "snake"));
+    assertU(adoc("id", "21", "standardfilt", "snake"));
+    assertU(adoc("id", "22", "standardfilt", "shark"));
+    assertU(adoc("id", "23", "standardfilt", "a,b"));
 
-    assertNull(h.validateUpdate(adoc("id", "0", "lowerfilt", "a", "standardfilt", "a", "foo_i", "1")));
-    assertNull(h.validateUpdate(adoc("id", "1", "lowerfilt", "a", "standardfilt", "aa", "foo_i","1")));
-    assertNull(h.validateUpdate(adoc("id", "2", "lowerfilt", "aa", "standardfilt", "aaa", "foo_i","2")));
-    assertNull(h.validateUpdate(adoc("id", "3", "lowerfilt", "aaa", "standardfilt", "abbb")));
-    assertNull(h.validateUpdate(adoc("id", "4", "lowerfilt", "ab", "standardfilt", "b")));
-    assertNull(h.validateUpdate(adoc("id", "5", "lowerfilt", "abb", "standardfilt", "bb")));
-    assertNull(h.validateUpdate(adoc("id", "6", "lowerfilt", "abc", "standardfilt", "bbbb")));
-    assertNull(h.validateUpdate(adoc("id", "7", "lowerfilt", "b", "standardfilt", "c")));
-    assertNull(h.validateUpdate(adoc("id", "8", "lowerfilt", "baa", "standardfilt", "cccc")));
-    assertNull(h.validateUpdate(adoc("id", "9", "lowerfilt", "bbb", "standardfilt", "ccccc")));
-
-
-    assertNull(h.validateUpdate(adoc("id", "10", "standardfilt", "ddddd")));
-
-    assertNull(h.validateUpdate(commit()));
-    assertNull(h.validateUpdate(adoc("id", "11", "standardfilt", "ddddd")));
-    assertNull(h.validateUpdate(adoc("id", "12", "standardfilt", "ddddd")));
-    assertNull(h.validateUpdate(adoc("id", "13", "standardfilt", "ddddd")));
-    assertNull(h.validateUpdate(adoc("id", "14", "standardfilt", "d")));
-    assertNull(h.validateUpdate(adoc("id", "15", "standardfilt", "d")));
-    assertNull(h.validateUpdate(adoc("id", "16", "standardfilt", "d")));
-
-    assertNull(h.validateUpdate(commit()));
-
-    assertNull(h.validateUpdate(adoc("id", "17", "standardfilt", "snake")));
-    assertNull(h.validateUpdate(adoc("id", "18", "standardfilt", "spider")));
-    assertNull(h.validateUpdate(adoc("id", "19", "standardfilt", "shark")));
-    assertNull(h.validateUpdate(adoc("id", "20", "standardfilt", "snake")));
-    assertNull(h.validateUpdate(adoc("id", "21", "standardfilt", "snake")));
-    assertNull(h.validateUpdate(adoc("id", "22", "standardfilt", "shark")));
-    assertNull(h.validateUpdate(adoc("id", "23", "standardfilt", "a,b")));
-
-    assertNull(h.validateUpdate(commit()));
+    assertU(commit());
   }
 
   @Test
@@ -612,4 +620,32 @@ public class TermsComponentTest extends SolrTestCaseJ4 {
   private static String createShardQueryParamsString(ModifiableSolrParams params) {
     return TermsComponent.createShardQuery(params).params.toString();
   }
+
+  @Test
+  public void testDatePointField() throws Exception {
+    String[] dates = new String[]{"2015-01-03T14:30:00Z", "2014-03-15T12:00:00Z"};
+    for (int i = 0; i < 100; i++) {
+      assertU(adoc("id", Integer.toString(100000+i), "foo_pdt", dates[i % 2]) );
+      if (random().nextInt(10) == 0) assertU(commit());  // make multiple segments
+    }
+    assertU(commit());
+    assertU(adoc("id", Integer.toString(100102), "foo_pdt", dates[1]));
+    assertU(commit());
+
+    assertQ(req("indent","true", "qt","/terms", "terms","true",
+        "terms.fl","foo_pdt", "terms.sort","count"),
+        "count(//lst[@name='foo_pdt']/*)=2",
+        "//lst[@name='foo_pdt']/int[1][@name='" + dates[1] + "'][.='51']",
+        "//lst[@name='foo_pdt']/int[2][@name='" + dates[0] + "'][.='50']"
+    );
+
+    // test on empty index
+    assertU(delQ("*:*"));
+    assertU(commit());
+
+    assertQ(req("indent","true", "qt","/terms", "terms","true",
+        "terms.fl","foo_pdt", "terms.sort","count"),
+        "count(//lst[@name='foo_pdt']/*)=0"
+    );
+  }
 }
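
The expected counts in testDatePointField follow directly from the setup: the
loop adds 100 docs alternating between the two dates (50 each), then one more
doc is added with dates[1], giving 51 occurrences of dates[1] and 50 of
dates[0], which is exactly what the two XPath assertions check.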
diff --git a/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java b/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java
index f5d26ef..4ba2bd4 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java
@@ -17,6 +17,7 @@
 package org.apache.solr.handler.component;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
@@ -80,42 +81,36 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
   }
 
   private void _testExpand(String group, String floatAppend, String hint) throws Exception {
-
-    String[] doc = {"id","1", "term_s", "YYYY", group, "1"+floatAppend, "test_i", "5", "test_l", "10", "test_f", "2000", "type_s", "parent"};
-    assertU(adoc(doc));
-    assertU(commit());
-    String[] doc1 = {"id","2", "term_s","YYYY", group, "1"+floatAppend, "test_i", "50", "test_l", "100", "test_f", "200", "type_s", "child"};
-    assertU(adoc(doc1));
-
-    String[] doc2 = {"id","3", "term_s", "YYYY", "test_i", "5000", "test_l", "100", "test_f", "200"};
-    assertU(adoc(doc2));
-    assertU(commit());
-    String[] doc3 = {"id","4", "term_s", "YYYY", "test_i", "500", "test_l", "1000", "test_f", "2000"};
-    assertU(adoc(doc3));
-
-
-    String[] doc4 = {"id","5", "term_s", "YYYY", group, "2"+floatAppend, "test_i", "4", "test_l", "10", "test_f", "2000", "type_s", "parent"};
-    assertU(adoc(doc4));
-    assertU(commit());
-    String[] doc5 = {"id","6", "term_s","YYYY", group, "2"+floatAppend, "test_i", "10", "test_l", "100", "test_f", "200", "type_s", "child"};
-    assertU(adoc(doc5));
-    assertU(commit());
-
-    String[] doc6 = {"id","7", "term_s", "YYYY", group, "1"+floatAppend, "test_i", "1", "test_l", "100000", "test_f", "2000", "type_s", "child"};
-    assertU(adoc(doc6));
-    assertU(commit());
-    String[] doc7 = {"id","8", "term_s","YYYY", group, "2"+floatAppend, "test_i", "2", "test_l", "100000", "test_f", "200", "type_s", "child"};
-    assertU(adoc(doc7));
+    String[][] docs = {
+        {"id","1", "term_s", "YYYY", group, "1"+floatAppend, "test_i", "5", "test_l", "10", "test_f", "2000", "type_s", "parent"},
+        {"id","2", "term_s","YYYY", group, "1"+floatAppend, "test_i", "50", "test_l", "100", "test_f", "200", "type_s", "child"},
+        {"id","3", "term_s", "YYYY", "test_i", "5000", "test_l", "100", "test_f", "200"},
+        {"id","4", "term_s", "YYYY", "test_i", "500", "test_l", "1000", "test_f", "2000"},
+        {"id","5", "term_s", "YYYY", group, "2"+floatAppend, "test_i", "4", "test_l", "10", "test_f", "2000", "type_s", "parent"},
+        {"id","6", "term_s","YYYY", group, "2"+floatAppend, "test_i", "10", "test_l", "100", "test_f", "200", "type_s", "child"},
+        {"id","7", "term_s", "YYYY", group, "1"+floatAppend, "test_i", "1", "test_l", "100000", "test_f", "2000", "type_s", "child"},
+        {"id","8", "term_s","YYYY", group, "2"+floatAppend, "test_i", "2", "test_l",  "100000", "test_f", "200", "type_s", "child"}
+    };
+    // randomize addition of docs into a bunch of segments
+    // TODO there ought to be a test utility to do this; even add in batches
+    Collections.shuffle(Arrays.asList(docs), random());
+    for (String[] doc : docs) {
+      assertU(adoc(doc));
+      if (random().nextBoolean()) {
+        assertU(commit());
+      }
+    }
 
     assertU(commit());
 
-    //First basic test case.
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.add("q", "*:*");
     params.add("fq", "{!collapse field="+group+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_i)");
     params.add("expand", "true");
+
+    //First basic test case.
     assertQ(req(params), "*[count(/response/result/doc)=2]",
         "*[count(/response/lst[@name='expanded']/result)=2]",
         "/response/result/doc[1]/str[@name='id'][.='2']",
@@ -128,15 +123,7 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
 
     //Basic test case page 2
 
-    params = new ModifiableSolrParams();
-    params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+hint+"}");
-    params.add("defType", "edismax");
-    params.add("bf", "field(test_i)");
-    params.add("expand", "true");
-    params.add("rows", "1");
-    params.add("start", "1");
-    assertQ(req(params), "*[count(/response/result/doc)=1]",
+    assertQ(req(params, "rows", "1", "start", "1"), "*[count(/response/result/doc)=1]",
         "*[count(/response/lst[@name='expanded']/result)=1]",
         "/response/result/doc[1]/str[@name='id'][.='6']",
         "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/str[@name='id'][.='5']",
@@ -144,14 +131,9 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
     );
 
     //Test expand.sort
-    params = new ModifiableSolrParams();
-    params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+hint+"}");
-    params.add("defType", "edismax");
-    params.add("bf", "field(test_i)");
-    params.add("expand", "true");
-    params.add("expand.sort", "test_l desc, sub(1,1) asc");//the "sub()" just testing function queries
-    assertQ(req(params), "*[count(/response/result/doc)=2]",
+    //the "sub()" just testing function queries
+    assertQ(req(params,"expand.sort", "test_l desc, sub(1,1) asc"),
+        "*[count(/response/result/doc)=2]",
         "*[count(/response/lst[@name='expanded']/result)=2]",
         "/response/result/doc[1]/str[@name='id'][.='2']",
         "/response/result/doc[2]/str[@name='id'][.='6']",
@@ -182,7 +164,7 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
     );
 
 
-    //Test overide expand.q
+    //Test override expand.q
 
     params = new ModifiableSolrParams();
     params.add("q", "type_s:parent");
@@ -203,7 +185,7 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
     );
 
 
-    //Test overide expand.fq
+    //Test override expand.fq
 
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
@@ -224,7 +206,7 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
         "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/str[@name='id'][.='6']"
     );
 
-    //Test overide expand.fq and expand.q
+    //Test override expand.fq and expand.q
 
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
@@ -303,24 +285,81 @@ public class TestExpandComponent extends SolrTestCaseJ4 {
     params.add("defType", "edismax");
     params.add("bf", "field(test_i)");
     params.add("expand", "true");
-    params.add("fl", "id");
-    assertQ(req(params), "*[count(/response/result/doc)=2]",
+
+    assertQ(req(params, "fl", "id"),
+        "*[count(/response/result/doc)=2]",
         "*[count(/response/lst[@name='expanded']/result)=2]",
         "/response/result/doc[1]/str[@name='id'][.='2']",
         "/response/result/doc[2]/str[@name='id'][.='6']",
         "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='1']",
         "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='7']",
         "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/str[@name='id'][.='5']",
-        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/str[@name='id'][.='8']"
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/str[@name='id'][.='8']",
+        "count(//*[@name='score'])=0" // score shouldn't be returned when not requested
+    );
+
+    //Test key-only fl with score but no sorting
+
+    assertQ(req(params, "fl", "id,score"), "*[count(/response/result/doc)=2]",
+        "*[count(/response/lst[@name='expanded']/result)=2]",
+        "/response/result/doc[1]/str[@name='id'][.='2']",
+        "/response/result/doc[2]/str[@name='id'][.='6']",
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='1']",
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='7']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/str[@name='id'][.='5']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/str[@name='id'][.='8']",
+        "count(//*[@name='score' and .='NaN'])=0"
+    );
+
+
+    // Test with fl and sort=score desc
+    assertQ(req(params, "expand.sort", "score desc", "fl", "id,score"),
+        "*[count(/response/result/doc)=2]",
+        "*[count(/response/lst[@name='expanded']/result)=2]",
+        "/response/result/doc[1]/str[@name='id'][.='2']",
+        "/response/result/doc[2]/str[@name='id'][.='6']",
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='1']",
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='7']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/str[@name='id'][.='5']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/str[@name='id'][.='8']",
+        "count(//*[@name='score' and .='NaN'])=0"
+    );
+
+    //Test fl with score, sort by non-score
+
+    assertQ(req(params, "expand.sort", "test_l desc", "fl", "id,test_i,score"),
+        "*[count(/response/result/doc)=2]",
+        "count(/response/lst[@name='expanded']/result)=2",
+        "/response/result/doc[1]/str[@name='id'][.='2']",
+        "/response/result/doc[2]/str[@name='id'][.='6']",
+        // note that the expanded docs are score descending order (score is 1 test_i)
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']",
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='1']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/str[@name='id'][.='8']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/str[@name='id'][.='5']",
+        "count(//*[@name='score' and .='NaN'])=0",
+        "count(/response/lst[@name='expanded']/result/doc[number(*/@name='score')!=number(*/@name='test_i')])=0"
+    );
+
+    //Test fl with score with multi-sort
+
+    assertQ(req(params, "expand.sort", "test_l desc, score asc", "fl", "id,test_i,score"),
+        "*[count(/response/result/doc)=2]",
+        "count(/response/lst[@name='expanded']/result)=2",
+        "/response/result/doc[1]/str[@name='id'][.='2']",
+        "/response/result/doc[2]/str[@name='id'][.='6']",
+        // note that the expanded docs are score descending order (score is 1 test_i)
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']",
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='1']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/str[@name='id'][.='8']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/str[@name='id'][.='5']",
+        "count(//*[@name='score' and .='NaN'])=0",
+        "count(/response/lst[@name='expanded']/result/doc[number(*/@name='score')!=number(*/@name='test_i')])=0"
     );
   }
 
   @Test
   public void testExpandWithEmptyIndexReturnsZeroResults() {
-    //We make sure the index is cleared
-
-    clearIndex();
-    assertU(commit());
 
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.add("q", "*:*");
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricTestUtils.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricTestUtils.java
index c4eae43..e11c06e 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricTestUtils.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricTestUtils.java
@@ -78,7 +78,7 @@ public final class SolrMetricTestUtils {
   public static SolrMetricProducer getProducerOf(SolrMetricManager metricManager, SolrInfoBean.Category category, String scope, Map<String, Counter> metrics) {
     return new SolrMetricProducer() {
       @Override
-      public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
+      public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
         if (category == null) {
           throw new IllegalArgumentException("null category");
         }
@@ -86,7 +86,7 @@ public final class SolrMetricTestUtils {
           return;
         }
         for (Map.Entry<String, Counter> entry : metrics.entrySet()) {
-          manager.counter(null, registry, entry.getKey(), category.toString(), scope);
+          parentContext.counter(null, entry.getKey(), category.toString(), scope);
         }
       }
 
diff --git a/solr/core/src/test/org/apache/solr/pkg/TestPackages.java b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
index ddbf39a..4ff0f7a 100644
--- a/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
+++ b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
@@ -35,6 +35,7 @@ import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.GenericSolrRequest;
 import org.apache.solr.client.solrj.request.RequestWriter;
 import org.apache.solr.client.solrj.request.V2Request;
+import org.apache.solr.client.solrj.request.beans.Package;
 import org.apache.solr.client.solrj.util.ClientUtils;
 import org.apache.solr.cloud.ConfigRequest;
 import org.apache.solr.cloud.MiniSolrCloudCluster;
@@ -58,6 +59,7 @@ import static org.apache.solr.core.TestDynamicLoading.getFileContent;
 import static org.apache.solr.filestore.TestDistribPackageStore.readFile;
 
 @LogLevel("org.apache.solr.pkg.PackageLoader=DEBUG;org.apache.solr.pkg.PackageAPI=DEBUG")
+//@org.apache.lucene.util.LuceneTestCase.AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-13822") // leaks files
 public class TestPackages extends SolrCloudTestCase {
 
   @Test
@@ -79,7 +81,7 @@ public class TestPackages extends SolrCloudTestCase {
       postFileAndWait(cluster, "runtimecode/runtimelibs.jar.bin", FILE1,
           "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==");
 
-      PackageAPI.AddVersion add = new PackageAPI.AddVersion();
+      Package.AddVersion add = new Package.AddVersion();
       add.version = "1.0";
       add.pkg = "mypkg";
       add.files = Arrays.asList(new String[]{FILE1});
@@ -199,7 +201,7 @@ public class TestPackages extends SolrCloudTestCase {
           Utils.makeMap("Version","2"));
 
 
-      PackageAPI.DelVersion delVersion = new PackageAPI.DelVersion();
+      Package.DelVersion delVersion = new Package.DelVersion();
       delVersion.pkg = "mypkg";
       delVersion.version = "1.0";
       V2Request delete = new V2Request.Builder("/cluster/package")
@@ -348,7 +350,7 @@ public class TestPackages extends SolrCloudTestCase {
       String FILE2 = "/mypkg/v.0.12/jar_b.jar";
       String FILE3 = "/mypkg/v.0.13/jar_a.jar";
 
-      PackageAPI.AddVersion add = new PackageAPI.AddVersion();
+      Package.AddVersion add = new Package.AddVersion();
       add.version = "0.12";
       add.pkg = "test_pkg";
       add.files = Arrays.asList(new String[]{FILE1, FILE2});
@@ -421,7 +423,7 @@ public class TestPackages extends SolrCloudTestCase {
           ));
 
       //Now we will just delete one version
-      PackageAPI.DelVersion delVersion = new PackageAPI.DelVersion();
+      Package.DelVersion delVersion = new Package.DelVersion();
       delVersion.version = "0.1";//this version does not exist
       delVersion.pkg = "test_pkg";
       req = new V2Request.Builder("/cluster/package")
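
Context for the switch from PackageAPI.* to the SolrJ request beans in
org.apache.solr.client.solrj.request.beans: a client populates a bean and posts
it with the payload key naming the operation. A sketch, assuming stock SolrJ's
V2Request builder methods (the package name and file path are made up):

    // sketch: add a package version via the /cluster/package API
    Package.AddVersion add = new Package.AddVersion();
    add.pkg = "mypkg";
    add.version = "1.0";
    add.files = Arrays.asList("/mypkg/v1.0/lib.jar");
    V2Request addReq = new V2Request.Builder("/cluster/package")
        .withMethod(SolrRequest.METHOD.POST)
        .withPayload(Collections.singletonMap("add", add))
        .build();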
diff --git a/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java b/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java
index ae7a762..08c19e1 100644
--- a/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java
+++ b/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java
@@ -32,6 +32,7 @@ import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCase;
 import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.junit.Test;
 
 import com.github.benmanes.caffeine.cache.Cache;
@@ -49,10 +50,11 @@ public class TestCaffeineCache extends SolrTestCase {
   @Test
   public void testSimple() throws IOException {
     CaffeineCache<Integer, String> lfuCache = new CaffeineCache<>();
-    lfuCache.initializeMetrics(metricManager, registry, "foo", scope + "-1");
+    SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, registry, "foo");
+    lfuCache.initializeMetrics(solrMetricsContext, scope + "-1");
 
     CaffeineCache<Integer, String> newLFUCache = new CaffeineCache<>();
-    newLFUCache.initializeMetrics(metricManager, registry, "foo2", scope + "-2");
+    newLFUCache.initializeMetrics(solrMetricsContext, scope + "-2");
 
     Map<String, String> params = new HashMap<>();
     params.put("size", "100");
@@ -281,5 +283,6 @@ public class TestCaffeineCache extends SolrTestCase {
     }
     assertTrue("total ram bytes should be greater than 0", total > 0);
     assertTrue("total ram bytes exceeded limit", total < 1024 * 1024);
+    cache.close();
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java b/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java
index 271e9a9..209cf70 100644
--- a/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java
+++ b/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java
@@ -27,10 +27,10 @@ import org.apache.solr.SolrTestCase;
 import org.apache.solr.common.util.TimeSource;
 import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.util.ConcurrentLRUCache;
 import org.apache.solr.util.RTimer;
 
-import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
@@ -52,7 +52,7 @@ public class TestFastLRUCache extends SolrTestCase {
   String registry = TestUtil.randomSimpleString(random(), 2, 10);
   String scope = TestUtil.randomSimpleString(random(), 2, 10);
 
-  public void testPercentageAutowarm() throws IOException {
+  public void testPercentageAutowarm() throws Exception {
     FastLRUCache<Object, Object> fastCache = new FastLRUCache<>();
     Map<String, String> params = new HashMap<>();
     params.put("size", "100");
@@ -60,7 +60,8 @@ public class TestFastLRUCache extends SolrTestCase {
     params.put("autowarmCount", "100%");
     CacheRegenerator cr = new NoOpRegenerator();
     Object o = fastCache.init(params, null, cr);
-    fastCache.initializeMetrics(metricManager, registry, "foo", scope);
+    SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, registry, "foo");
+    fastCache.initializeMetrics(solrMetricsContext, scope);
     MetricsMap metrics = fastCache.getMetricsMap();
     fastCache.setState(SolrCache.State.LIVE);
     for (int i = 0; i < 101; i++) {
@@ -75,7 +76,7 @@ public class TestFastLRUCache extends SolrTestCase {
     assertEquals(null, fastCache.get(1));  // first item put in should be the first out
     FastLRUCache<Object, Object> fastCacheNew = new FastLRUCache<>();
     fastCacheNew.init(params, o, cr);
-    fastCacheNew.initializeMetrics(metricManager, registry, "foo", scope);
+    fastCacheNew.initializeMetrics(solrMetricsContext, scope);
     metrics = fastCacheNew.getMetricsMap();
     fastCacheNew.warm(null, fastCache);
     fastCacheNew.setState(SolrCache.State.LIVE);
@@ -94,7 +95,7 @@ public class TestFastLRUCache extends SolrTestCase {
     fastCacheNew.close();
   }
   
-  public void testPercentageAutowarmMultiple() throws IOException {
+  public void testPercentageAutowarmMultiple() throws Exception {
     doTestPercentageAutowarm(100, 50, new int[]{51, 55, 60, 70, 80, 99, 100}, new int[]{1, 2, 3, 5, 10, 20, 30, 40, 50});
     doTestPercentageAutowarm(100, 25, new int[]{76, 80, 99, 100}, new int[]{1, 2, 3, 5, 10, 20, 30, 40, 50, 51, 55, 60, 70});
     doTestPercentageAutowarm(1000, 10, new int[]{901, 930, 950, 999, 1000}, new int[]{1, 5, 100, 200, 300, 400, 800, 899, 900});
@@ -102,7 +103,7 @@ public class TestFastLRUCache extends SolrTestCase {
     doTestPercentageAutowarm(100, 0, new int[]{}, new int[]{1, 10, 25, 51, 55, 60, 70, 80, 99, 100, 200, 300});
   }
   
-  private void doTestPercentageAutowarm(int limit, int percentage, int[] hits, int[]misses) {
+  private void doTestPercentageAutowarm(int limit, int percentage, int[] hits, int[] misses) throws Exception {
     FastLRUCache<Object, Object> fastCache = new FastLRUCache<>();
     Map<String, String> params = new HashMap<>();
     params.put("size", String.valueOf(limit));
@@ -110,7 +111,8 @@ public class TestFastLRUCache extends SolrTestCase {
     params.put("autowarmCount", percentage + "%");
     CacheRegenerator cr = new NoOpRegenerator();
     Object o = fastCache.init(params, null, cr);
-    fastCache.initializeMetrics(metricManager, registry, "foo", scope);
+    SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, registry, "foo");
+    fastCache.initializeMetrics(solrMetricsContext, scope);
     fastCache.setState(SolrCache.State.LIVE);
     for (int i = 1; i <= limit; i++) {
       fastCache.put(i, "" + i);//adds numbers from 1 to 100
@@ -118,7 +120,7 @@ public class TestFastLRUCache extends SolrTestCase {
 
     FastLRUCache<Object, Object> fastCacheNew = new FastLRUCache<>();
     fastCacheNew.init(params, o, cr);
-    fastCacheNew.initializeMetrics(metricManager, registry, "foo", scope);
+    fastCacheNew.initializeMetrics(solrMetricsContext, scope);
     fastCacheNew.warm(null, fastCache);
     fastCacheNew.setState(SolrCache.State.LIVE);
     fastCache.close();
@@ -136,14 +138,15 @@ public class TestFastLRUCache extends SolrTestCase {
     fastCacheNew.close();
   }
   
-  public void testNoAutowarm() throws IOException {
+  public void testNoAutowarm() throws Exception {
     FastLRUCache<Object, Object> fastCache = new FastLRUCache<>();
     Map<String, String> params = new HashMap<>();
     params.put("size", "100");
     params.put("initialSize", "10");
     CacheRegenerator cr = new NoOpRegenerator();
     Object o = fastCache.init(params, null, cr);
-    fastCache.initializeMetrics(metricManager, registry, "foo", scope);
+    SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, registry, "foo");
+    fastCache.initializeMetrics(solrMetricsContext, scope);
     fastCache.setState(SolrCache.State.LIVE);
     for (int i = 0; i < 101; i++) {
       fastCache.put(i + 1, "" + (i + 1));
@@ -166,7 +169,7 @@ public class TestFastLRUCache extends SolrTestCase {
     fastCacheNew.close();
   }
   
-  public void testFullAutowarm() throws IOException {
+  public void testFullAutowarm() throws Exception {
     FastLRUCache<Object, Object> cache = new FastLRUCache<>();
     Map<Object, Object> params = new HashMap<>();
     params.put("size", "100");
@@ -196,7 +199,7 @@ public class TestFastLRUCache extends SolrTestCase {
     cacheNew.close();
   }
   
-  public void testSimple() throws IOException {
+  public void testSimple() throws Exception {
     FastLRUCache sc = new FastLRUCache();
     Map l = new HashMap();
     l.put("size", "100");
@@ -204,7 +207,8 @@ public class TestFastLRUCache extends SolrTestCase {
     l.put("autowarmCount", "25");
     CacheRegenerator cr = new NoOpRegenerator();
     Object o = sc.init(l, null, cr);
-    sc.initializeMetrics(metricManager, registry, "foo", scope);
+    SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, registry, "foo");
+    sc.initializeMetrics(solrMetricsContext, scope);
     sc.setState(SolrCache.State.LIVE);
     for (int i = 0; i < 101; i++) {
       sc.put(i + 1, "" + (i + 1));
@@ -222,7 +226,7 @@ public class TestFastLRUCache extends SolrTestCase {
 
     FastLRUCache scNew = new FastLRUCache();
     scNew.init(l, o, cr);
-    scNew.initializeMetrics(metricManager, registry, "foo", scope);
+    scNew.initializeMetrics(solrMetricsContext, scope);
     scNew.warm(null, sc);
     scNew.setState(SolrCache.State.LIVE);
     sc.close();
@@ -304,7 +308,7 @@ public class TestFastLRUCache extends SolrTestCase {
     System.out.println("time=" + timer.getTime() + ", minSize="+minSize+",maxSize="+maxSize);
   }
 
-  public void testAccountable() {
+  public void testAccountable() throws Exception {
     FastLRUCache<Query, DocSet> sc = new FastLRUCache<>();
     try {
       Map l = new HashMap();
@@ -313,7 +317,8 @@ public class TestFastLRUCache extends SolrTestCase {
       l.put("autowarmCount", "25");
       CacheRegenerator cr = new NoOpRegenerator();
       Object o = sc.init(l, null, cr);
-      sc.initializeMetrics(metricManager, registry, "foo", scope);
+      SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, registry, "foo");
+      sc.initializeMetrics(solrMetricsContext, scope);
       sc.setState(SolrCache.State.LIVE);
       long initialBytes = sc.ramBytesUsed();
       WildcardQuery q = new WildcardQuery(new Term("foo", "bar"));
@@ -339,7 +344,8 @@ public class TestFastLRUCache extends SolrTestCase {
     params.put("maxRamMB", "8");
     CacheRegenerator cr = new NoOpRegenerator();
     Object o = cache.init(params, null, cr);
-    cache.initializeMetrics(metricManager, registry, "foo", scope);
+    SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, registry, "foo");
+    cache.initializeMetrics(solrMetricsContext, scope);
     for (int i = 0; i < 6; i++) {
       cache.put("" + i, new Accountable() {
         @Override
diff --git a/solr/core/src/test/org/apache/solr/search/TestLFUCache.java b/solr/core/src/test/org/apache/solr/search/TestLFUCache.java
index 7989d8e..1b0c058 100644
--- a/solr/core/src/test/org/apache/solr/search/TestLFUCache.java
+++ b/solr/core/src/test/org/apache/solr/search/TestLFUCache.java
@@ -37,6 +37,7 @@ import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.TimeSource;
 import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.util.ConcurrentLFUCache;
 import org.apache.solr.util.DefaultSolrThreadFactory;
 import org.junit.BeforeClass;
@@ -134,7 +135,7 @@ public class TestLFUCache extends SolrTestCaseJ4 {
 
 
   @Test
-  public void testSimple() throws IOException {
+  public void testSimple() throws Exception {
     SolrMetricManager metricManager = new SolrMetricManager();
     Random r = random();
     String registry = TestUtil.randomSimpleString(r, 2, 10);
@@ -142,9 +143,10 @@ public class TestLFUCache extends SolrTestCaseJ4 {
     LFUCache lfuCache = new LFUCache();
     LFUCache newLFUCache = new LFUCache();
     LFUCache noWarmLFUCache = new LFUCache();
-    lfuCache.initializeMetrics(metricManager, registry, "foo", scope + ".lfuCache");
-    newLFUCache.initializeMetrics(metricManager, registry, "foo", scope + ".newLFUCache");
-    noWarmLFUCache.initializeMetrics(metricManager, registry, "foo", scope + ".noWarmLFUCache");
... 2546 lines suppressed ...
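
The hunks above (and the suppressed ones follow the same pattern) make one mechanical change: instead of the four-argument initializeMetrics(metricManager, registry, "foo", scope), each test now builds a SolrMetricsContext once and passes that in. A condensed sketch of the new idiom, reusing the names from the tests:

    SolrMetricsContext solrMetricsContext =
        new SolrMetricsContext(metricManager, registry, "foo");
    cache.initializeMetrics(solrMetricsContext, scope); // replaces the 4-arg overload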


[lucene-solr] 01/10: SOLR-13822: Isolated class loading from packages

Posted by is...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ishan pushed a commit to branch jira/solr-13662-2
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit fe77a4596d8f5b02d7dadf1f11c211a4e8b76efa
Author: noble <no...@apache.org>
AuthorDate: Sat Oct 12 17:16:43 2019 +1100

    SOLR-13822: Isolated class loading from packages
---
 .../java/org/apache/solr/core/CoreContainer.java   |  10 +
 .../src/java/org/apache/solr/core/PluginBag.java   |  15 +-
 .../src/java/org/apache/solr/core/PluginInfo.java  |  39 +-
 .../src/java/org/apache/solr/core/SolrCore.java    |  18 +-
 .../org/apache/solr/core/SolrResourceLoader.java   | 333 ++++++++++--------
 .../org/apache/solr/handler/SolrConfigHandler.java |  23 +-
 .../src/java/org/apache/solr/pkg/PackageAPI.java   | 367 +++++++++++++++++++
 .../java/org/apache/solr/pkg/PackageListeners.java |  89 +++++
 .../java/org/apache/solr/pkg/PackageLoader.java    | 254 +++++++++++++
 .../org/apache/solr/pkg/PackagePluginHolder.java   |  92 +++++
 .../solr/security/PermissionNameProvider.java      |   3 +
 .../src/test/org/apache/solr/pkg/TestPackages.java | 391 +++++++++++++++++++++
 .../org/apache/solr/common/cloud/SolrZkClient.java |   9 +-
 .../apache/solr/common/cloud/ZkStateReader.java    |   1 +
 14 files changed, 1471 insertions(+), 173 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index 054bd67..c32783c 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -100,6 +100,7 @@ import org.apache.solr.logging.MDCLoggingContext;
 import org.apache.solr.metrics.SolrCoreMetricManager;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.pkg.PackageLoader;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.request.SolrRequestInfo;
 import org.apache.solr.search.SolrFieldCacheBean;
@@ -221,6 +222,7 @@ public class CoreContainer {
   protected volatile AutoscalingHistoryHandler autoscalingHistoryHandler;
 
   private PackageStoreAPI packageStoreAPI;
+  private PackageLoader packageLoader;
 
 
   // Bits for the state variable.
@@ -580,6 +582,10 @@ public class CoreContainer {
     return replayUpdatesExecutor;
   }
 
+  public PackageLoader getPackageLoader() {
+    return packageLoader;
+  }
+
   public PackageStoreAPI getPackageStoreAPI() {
     return packageStoreAPI;
   }
@@ -734,6 +740,10 @@ public class CoreContainer {
     if (isZooKeeperAware()) {
       metricManager.loadClusterReporters(metricReporters, this);
     }
+    packageLoader = new PackageLoader(this);
+    containerHandlers.getApiBag().register(new AnnotatedApi(packageLoader.getPackageAPI().editAPI), Collections.EMPTY_MAP);
+    containerHandlers.getApiBag().register(new AnnotatedApi(packageLoader.getPackageAPI().readAPI), Collections.EMPTY_MAP);
+
 
     // setup executor to load cores in parallel
     ExecutorService coreLoadExecutor = MetricUtils.instrumentedExecutorService(
diff --git a/solr/core/src/java/org/apache/solr/core/PluginBag.java b/solr/core/src/java/org/apache/solr/core/PluginBag.java
index 6088f52..bfdfa20 100644
--- a/solr/core/src/java/org/apache/solr/core/PluginBag.java
+++ b/solr/core/src/java/org/apache/solr/core/PluginBag.java
@@ -43,6 +43,7 @@ import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.handler.component.SearchComponent;
+import org.apache.solr.pkg.PackagePluginHolder;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.update.processor.UpdateRequestProcessorChain;
 import org.apache.solr.update.processor.UpdateRequestProcessorFactory;
@@ -97,7 +98,7 @@ public class PluginBag<T> implements AutoCloseable {
     this(klass, core, false);
   }
 
-  static void initInstance(Object inst, PluginInfo info) {
+  public static void initInstance(Object inst, PluginInfo info) {
     if (inst instanceof PluginInfoInitialized) {
       ((PluginInfoInitialized) inst).init(info);
     } else if (inst instanceof NamedListInitializedPlugin) {
@@ -138,9 +139,13 @@ public class PluginBag<T> implements AutoCloseable {
       log.debug("{} : '{}' created with startup=lazy ", meta.getCleanTag(), info.name);
       return new LazyPluginHolder<T>(meta, info, core, core.getResourceLoader(), false);
     } else {
-      T inst = core.createInstance(info.className, (Class<T>) meta.clazz, meta.getCleanTag(), null, core.getResourceLoader());
-      initInstance(inst, info);
-      return new PluginHolder<>(info, inst);
+      if (info.pkgName != null) {
+        return new PackagePluginHolder<>(info, core, meta);
+      } else {
+        T inst = core.createInstance(info.className, (Class<T>) meta.clazz, meta.getCleanTag(), null, core.getResourceLoader(info.pkgName));
+        initInstance(inst, info);
+        return new PluginHolder<>(info, inst);
+      }
     }
   }
 
@@ -329,7 +334,7 @@ public class PluginBag<T> implements AutoCloseable {
    * subclasses may choose to lazily load the plugin
    */
   public static class PluginHolder<T> implements AutoCloseable {
-    private T inst;
+    protected T inst;
     protected final PluginInfo pluginInfo;
     boolean registerAPI = false;
 
diff --git a/solr/core/src/java/org/apache/solr/core/PluginInfo.java b/solr/core/src/java/org/apache/solr/core/PluginInfo.java
index 1bc85ae..366309e 100644
--- a/solr/core/src/java/org/apache/solr/core/PluginInfo.java
+++ b/solr/core/src/java/org/apache/solr/core/PluginInfo.java
@@ -16,14 +16,20 @@
  */
 package org.apache.solr.core;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.solr.common.MapSerializable;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.Pair;
 import org.apache.solr.util.DOMUtil;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 
-import java.util.*;
-
 import static java.util.Arrays.asList;
 import static java.util.Collections.unmodifiableList;
 import static java.util.Collections.unmodifiableMap;
@@ -35,27 +41,46 @@ import static org.apache.solr.schema.FieldType.CLASS_NAME;
  *
  */
 public class PluginInfo implements MapSerializable {
-  public final String name, className, type;
+  public final String name, className, type, pkgName;
   public final NamedList initArgs;
   public final Map<String, String> attributes;
   public final List<PluginInfo> children;
   private boolean isFromSolrConfig;
 
+
+
   public PluginInfo(String type, Map<String, String> attrs, NamedList initArgs, List<PluginInfo> children) {
     this.type = type;
     this.name = attrs.get(NAME);
-    this.className = attrs.get(CLASS_NAME);
+    Pair<String, String> parsed = parseClassName(attrs.get(CLASS_NAME));
+    this.className = parsed.second();
+    this.pkgName = parsed.first();
     this.initArgs = initArgs;
     attributes = unmodifiableMap(attrs);
     this.children = children == null ? Collections.<PluginInfo>emptyList(): unmodifiableList(children);
     isFromSolrConfig = false;
   }
+  static Pair<String, String> parseClassName(String name) {
+    String pkgName = null;
+    String className = name;
+    if (name != null) {
+      int colonIdx = name.indexOf(':');
+      if (colonIdx > -1) {
+        pkgName = name.substring(0, colonIdx);
+        className = name.substring(colonIdx + 1);
+      }
+    }
+    return new Pair<>(pkgName, className);
+
+  }
 
 
   public PluginInfo(Node node, String err, boolean requireName, boolean requireClass) {
     type = node.getNodeName();
     name = DOMUtil.getAttr(node, NAME, requireName ? err : null);
-    className = DOMUtil.getAttr(node, CLASS_NAME, requireClass ? err : null);
+    Pair<String, String> parsed = parseClassName(DOMUtil.getAttr(node, CLASS_NAME, requireClass ? err : null));
+    className = parsed.second();
+    pkgName = parsed.first();
     initArgs = DOMUtil.childNodesToNamedList(node);
     attributes = unmodifiableMap(DOMUtil.toMap(node.getAttributes()));
     children = loadSubPlugins(node);
@@ -85,7 +110,9 @@ public class PluginInfo implements MapSerializable {
     }
     this.type = type;
     this.name = (String) m.get(NAME);
-    this.className = (String) m.get(CLASS_NAME);
+    Pair<String, String> parsed = parseClassName((String) m.get(CLASS_NAME));
+    this.className = parsed.second();
+    this.pkgName = parsed.first();
     attributes = unmodifiableMap(m);
     this.children =  Collections.<PluginInfo>emptyList();
     isFromSolrConfig = true;
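
Taken together with the PluginBag change above, a plugin's class attribute may now carry a package prefix: a "pkg:className" reference is split at the first colon, and such a plugin is wrapped in a PackagePluginHolder instead of being instantiated eagerly from the core's loader. A standalone sketch of the parsing rule (the class and example names below are illustrative, not part of the patch):

    // Mirrors the parseClassName(String) logic introduced above.
    public class ClassNameParsingSketch {
      /** "mypkg:com.example.MyHandler" -> {"mypkg", "com.example.MyHandler"}
       *  "solr.SearchHandler"          -> {null, "solr.SearchHandler"} */
      static String[] split(String name) {
        if (name == null) return new String[] {null, null};
        int colonIdx = name.indexOf(':');
        return colonIdx > -1
            ? new String[] {name.substring(0, colonIdx), name.substring(colonIdx + 1)}
            : new String[] {null, name};
      }
    }

In solrconfig.xml terms, a declaration such as class="mypkg:com.example.MyHandler" (both names illustrative) resolves the class through the classloader of package mypkg rather than through the core's own loader.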
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 3e2fb1e..59c9a7a 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -110,6 +110,8 @@ import org.apache.solr.logging.MDCLoggingContext;
 import org.apache.solr.metrics.SolrCoreMetricManager;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.pkg.PackageListeners;
+import org.apache.solr.pkg.PackageLoader;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.response.BinaryResponseWriter;
@@ -237,6 +239,8 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
   public volatile boolean indexEnabled = true;
   public volatile boolean readOnly = false;
 
+  private PackageListeners packageListeners = new PackageListeners();
+
   public Set<String> getMetricNames() {
     return metricNames;
   }
@@ -261,6 +265,10 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
     return restManager;
   }
 
+  public PackageListeners getPackageListeners() {
+    return packageListeners;
+  }
+
   static int boolean_query_max_clause_count = Integer.MIN_VALUE;
 
   private ExecutorService coreAsyncTaskExecutor = ExecutorUtil.newMDCAwareCachedThreadPool("Core Async Task");
@@ -274,6 +282,14 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
     return resourceLoader;
   }
 
+  public SolrResourceLoader getResourceLoader(String pkg) {
+    if (pkg == null) {
+      return resourceLoader;
+    }
+    PackageLoader.Package aPackage = coreContainer.getPackageLoader().getPackage(pkg);
+    return aPackage.getLatest().getLoader();
+  }
+
   /**
    * Gets the configuration resource name used by this core instance.
    *
@@ -856,7 +872,7 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
 
   public <T extends Object> T createInitInstance(PluginInfo info, Class<T> cast, String msg, String defClassName) {
     if (info == null) return null;
-    T o = createInstance(info.className == null ? defClassName : info.className, cast, msg, this, getResourceLoader());
+    T o = createInstance(info.className == null ? defClassName : info.className, cast, msg, this, getResourceLoader(info.pkgName));
     if (o instanceof PluginInfoInitialized) {
       ((PluginInfoInitialized) o).init(info);
     } else if (o instanceof NamedListInitializedPlugin) {
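
SolrCore.getResourceLoader(String) is deliberately null-tolerant so existing call sites are unaffected: a null package name returns the core's own loader, while anything else resolves through the PackageLoader to the latest version of that package. A hedged usage sketch ('core' and 'info' are assumed to be live objects; SomePlugin is a placeholder type):

    // Pick the loader for a possibly package-qualified plugin.
    SolrResourceLoader loader = core.getResourceLoader(info.pkgName); // null -> core loader
    // Instantiation then proceeds as before, e.g.
    //   core.createInstance(info.className, SomePlugin.class, "tag", null, loader);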
diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
index 4132918..cc1ef7a 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
@@ -28,6 +28,7 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.lang.invoke.MethodHandles;
 import java.lang.reflect.Constructor;
+import java.net.MalformedURLException;
 import java.net.URL;
 import java.net.URLClassLoader;
 import java.nio.charset.CharacterCodingException;
@@ -81,11 +82,11 @@ import org.slf4j.LoggerFactory;
 
 /**
  * @since solr 1.3
- */ 
-public class SolrResourceLoader implements ResourceLoader,Closeable
-{
+ */
+public class SolrResourceLoader implements ResourceLoader, Closeable {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
+  private String name = "";
   static final String project = "solr";
   static final String base = "org.apache" + "." + project;
   static final String[] packages = {
@@ -100,7 +101,7 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
   protected URLClassLoader classLoader;
   private final Path instanceDir;
   private String dataDir;
-  
+
   private final List<SolrCoreAware> waitingForCore = Collections.synchronizedList(new ArrayList<SolrCoreAware>());
   private final List<SolrInfoBean> infoMBeans = Collections.synchronizedList(new ArrayList<SolrInfoBean>());
   private final List<ResourceLoaderAware> waitingForResources = Collections.synchronizedList(new ArrayList<ResourceLoaderAware>());
@@ -109,18 +110,18 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
   private final Properties coreProperties;
 
   private volatile boolean live;
-  
+
   // Provide a registry so that managed resources can register themselves while the XML configuration
   // documents are being parsed ... after all are registered, they are asked by the RestManager to
   // initialize themselves. This two-step process is required because not all resources are available
   // (such as the SolrZkClient) when XML docs are being parsed.    
   private RestManager.Registry managedResourceRegistry;
-  
+
   public synchronized RestManager.Registry getManagedResourceRegistry() {
     if (managedResourceRegistry == null) {
-      managedResourceRegistry = new RestManager.Registry();      
+      managedResourceRegistry = new RestManager.Registry();
     }
-    return managedResourceRegistry; 
+    return managedResourceRegistry;
   }
 
   public SolrResourceLoader() {
@@ -134,11 +135,20 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
    * found in the "lib/" directory in the specified instance directory.
    * If the instance directory is not specified (=null), SolrResourceLoader#locateInstanceDir will provide one.
    */
-  public SolrResourceLoader(Path instanceDir, ClassLoader parent)
-  {
+  public SolrResourceLoader(Path instanceDir, ClassLoader parent) {
     this(instanceDir, parent, null);
   }
 
+  public SolrResourceLoader(String name, List<Path> classpath, Path instanceDir, ClassLoader parent) throws MalformedURLException {
+    this(instanceDir, parent);
+    this.name = name;
+    for (Path path : classpath) {
+      addToClassLoader(path.toUri().normalize().toURL());
+    }
+
+  }
+
+
   public SolrResourceLoader(Path instanceDir) {
     this(instanceDir, null, null);
   }
@@ -157,7 +167,7 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
     if (instanceDir == null) {
       this.instanceDir = SolrResourceLoader.locateSolrHome().toAbsolutePath().normalize();
       log.debug("new SolrResourceLoader for deduced Solr Home: '{}'", this.instanceDir);
-    } else{
+    } else {
       this.instanceDir = instanceDir.toAbsolutePath().normalize();
       log.debug("new SolrResourceLoader for directory: '{}'", this.instanceDir);
     }
@@ -167,7 +177,7 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
     }
     this.classLoader = URLClassLoader.newInstance(new URL[0], parent);
 
-    /* 
+    /*
      * Skip the lib subdirectory when we are loading from the solr home.
      * Otherwise load it, so core lib directories still get loaded.
      * The default sharedLib will pick this up later, and if the user has
@@ -264,6 +274,7 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
 
   /**
    * Utility method to get the URLs of all paths under a given directory that match a filter
+   *
    * @param libDir the root directory
    * @param filter the filter
    * @return all matching URLs
@@ -296,8 +307,9 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
 
   /**
    * Utility method to get the URLs of all paths under a given directory that match a regex
+   *
    * @param libDir the root directory
-   * @param regex the regex as a String
+   * @param regex  the regex as a String
    * @return all matching URLs
    * @throws IOException on error
    */
@@ -310,15 +322,17 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
       }
     });
   }
-  
-  /** Ensures a directory name always ends with a '/'. */
+
+  /**
+   * Ensures a directory name always ends with a '/'.
+   */
   public static String normalizeDir(String path) {
-    return ( path != null && (!(path.endsWith("/") || path.endsWith("\\"))) )? path + File.separator : path;
+    return (path != null && (!(path.endsWith("/") || path.endsWith("\\")))) ? path + File.separator : path;
   }
-  
+
   public String[] listConfigDir() {
     File configdir = new File(getConfigDir());
-    if( configdir.exists() && configdir.isDirectory() ) {
+    if (configdir.exists() && configdir.isDirectory()) {
       return configdir.list();
     } else {
       return new String[0];
@@ -328,8 +342,8 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
   public String getConfigDir() {
     return instanceDir.resolve("conf").toString();
   }
-  
-  public String getDataDir()    {
+
+  public String getDataDir() {
     return dataDir;
   }
 
@@ -341,23 +355,28 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
    * EXPERT
    * <p>
    * The underlying class loader.  Most applications will not need to use this.
+   *
    * @return The {@link ClassLoader}
    */
   public ClassLoader getClassLoader() {
     return classLoader;
   }
 
-  /** Opens a schema resource by its name.
+  /**
+   * Opens a schema resource by its name.
    * Override this method to customize loading schema resources.
-   *@return the stream for the named schema
+   *
+   * @return the stream for the named schema
    */
   public InputStream openSchema(String name) throws IOException {
     return openResource(name);
   }
-  
-  /** Opens a config resource by its name.
+
+  /**
+   * Opens a config resource by its name.
    * Override this method to customize loading config resources.
-   *@return the stream for the named configuration
+   *
+   * @return the stream for the named configuration
    */
   public InputStream openConfig(String name) throws IOException {
     return openResource(name);
@@ -372,14 +391,16 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
     throw new IOException("File " + pathToCheck + " is outside resource loader dir " + instanceDir +
         "; set -Dsolr.allow.unsafe.resourceloading=true to allow unsafe loading");
   }
-  
-  /** Opens any resource by its name.
+
+  /**
+   * Opens any resource by its name.
    * By default, this will look in multiple locations to load the resource:
    * $configDir/$resource (if resource is not absolute)
    * $CWD/$resource
    * otherwise, it will look for it in any jar accessible through the class loader.
    * Override this method to customize loading resources.
-   *@return the stream for the named resource
+   *
+   * @return the stream for the named resource
    */
   @Override
   public InputStream openResource(String resource) throws IOException {
@@ -461,22 +482,22 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
    * @throws IOException If there is a low-level I/O error.
    */
   public List<String> getLines(String resource,
-      String encoding) throws IOException {
+                               String encoding) throws IOException {
     return getLines(resource, Charset.forName(encoding));
   }
 
 
-  public List<String> getLines(String resource, Charset charset) throws IOException{
+  public List<String> getLines(String resource, Charset charset) throws IOException {
     try {
       return WordlistLoader.getLines(openResource(resource), charset);
     } catch (CharacterCodingException ex) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, 
-         "Error loading resource (wrong encoding?): " + resource, ex);
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+          "Error loading resource (wrong encoding?): " + resource, ex);
     }
   }
 
   /*
-   * A static map of short class name to fully qualified class name 
+   * A static map of short class name to fully qualified class name
    */
   private static final Map<String, String> classNameCache = new ConcurrentHashMap<>();
 
@@ -486,14 +507,14 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
   }
 
   // Using this pattern, legacy analysis components from previous Solr versions are identified and delegated to SPI loader:
-  private static final Pattern legacyAnalysisPattern = 
-      Pattern.compile("((\\Q"+base+".analysis.\\E)|(\\Q"+project+".\\E))([\\p{L}_$][\\p{L}\\p{N}_$]+?)(TokenFilter|Filter|Tokenizer|CharFilter)Factory");
+  private static final Pattern legacyAnalysisPattern =
+      Pattern.compile("((\\Q" + base + ".analysis.\\E)|(\\Q" + project + ".\\E))([\\p{L}_$][\\p{L}\\p{N}_$]+?)(TokenFilter|Filter|Tokenizer|CharFilter)Factory");
 
   @Override
   public <T> Class<? extends T> findClass(String cname, Class<T> expectedType) {
     return findClass(cname, expectedType, empty);
   }
-  
+
   /**
    * This method loads a class either with its FQN or a short-name (solr.class-simplename or class-simplename).
    * It tries to load the class with the name that is given first and if it fails, it tries all the known
@@ -501,25 +522,25 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
    * for the same class faster. The caching is done only if the class is loaded by the webapp classloader and it
    * is loaded using a shortname.
    *
-   * @param cname The name or the short name of the class.
+   * @param cname       The name or the short name of the class.
    * @param subpackages the packages to be tried if the cname starts with solr.
    * @return the loaded class. An exception is thrown if it fails
    */
   public <T> Class<? extends T> findClass(String cname, Class<T> expectedType, String... subpackages) {
     if (subpackages == null || subpackages.length == 0 || subpackages == packages) {
       subpackages = packages;
-      String  c = classNameCache.get(cname);
-      if(c != null) {
+      String c = classNameCache.get(cname);
+      if (c != null) {
         try {
           return Class.forName(c, true, classLoader).asSubclass(expectedType);
         } catch (ClassNotFoundException | ClassCastException e) {
           // this can happen if the legacyAnalysisPattern below caches the wrong thing
-          log.warn("Unable to load cached class, attempting lookup. name={} shortname={} reason={}", c, cname, e);
+          log.warn(name + " Unable to load cached class, attempting lookup. name={} shortname={} reason={}", c, cname, e);
           classNameCache.remove(cname);
         }
       }
     }
-    
+
     Class<? extends T> clazz = null;
     try {
       // first try legacy analysis patterns, now replaced by Lucene's Analysis package:
@@ -537,43 +558,43 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
           } else {
             log.warn("'{}' looks like an analysis factory, but caller requested different class type: {}", cname, expectedType.getName());
           }
-        } catch (IllegalArgumentException ex) { 
+        } catch (IllegalArgumentException ex) {
           // ok, we fall back to legacy loading
         }
       }
-      
+
       // first try cname == full name
       try {
         return clazz = Class.forName(cname, true, classLoader).asSubclass(expectedType);
       } catch (ClassNotFoundException e) {
-        String newName=cname;
+        String newName = cname;
         if (newName.startsWith(project)) {
-          newName = cname.substring(project.length()+1);
+          newName = cname.substring(project.length() + 1);
         }
         for (String subpackage : subpackages) {
           try {
             String name = base + '.' + subpackage + newName;
             log.trace("Trying class name " + name);
-            return clazz = Class.forName(name,true,classLoader).asSubclass(expectedType);
+            return clazz = Class.forName(name, true, classLoader).asSubclass(expectedType);
           } catch (ClassNotFoundException e1) {
             // ignore... assume first exception is best.
           }
         }
-    
-        throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "Error loading class '" + cname + "'", e);
+
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, name + " Error loading class '" + cname + "'", e);
       }
-      
+
     } finally {
       if (clazz != null) {
         //cache the shortname vs FQN if it is loaded by the webapp classloader  and it is loaded
         // using a shortname
         if (clazz.getClassLoader() == SolrResourceLoader.class.getClassLoader() &&
-              !cname.equals(clazz.getName()) &&
-              (subpackages.length == 0 || subpackages == packages)) {
+            !cname.equals(clazz.getName()) &&
+            (subpackages.length == 0 || subpackages == packages)) {
           //store in the cache
           classNameCache.put(cname, clazz.getName());
         }
-        
+
         // print warning if class is deprecated
         if (clazz.isAnnotationPresent(Deprecated.class)) {
           log.warn("Solr loaded a deprecated plugin/analysis class [{}]. Please consult documentation how to replace it accordingly.",
@@ -582,9 +603,9 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
       }
     }
   }
-  
+
   static final String empty[] = new String[0];
-  
+
   @Override
   public <T> T newInstance(String name, Class<T> expectedType) {
     return newInstance(name, expectedType, empty);
@@ -593,33 +614,32 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
   private static final Class[] NO_CLASSES = new Class[0];
   private static final Object[] NO_OBJECTS = new Object[0];
 
-  public <T> T newInstance(String cname, Class<T> expectedType, String ... subpackages) {
+  public <T> T newInstance(String cname, Class<T> expectedType, String... subpackages) {
     return newInstance(cname, expectedType, subpackages, NO_CLASSES, NO_OBJECTS);
   }
 
-  public CoreAdminHandler newAdminHandlerInstance(final CoreContainer coreContainer, String cname, String ... subpackages) {
+  public CoreAdminHandler newAdminHandlerInstance(final CoreContainer coreContainer, String cname, String... subpackages) {
     Class<? extends CoreAdminHandler> clazz = findClass(cname, CoreAdminHandler.class, subpackages);
-    if( clazz == null ) {
-      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
-          "Can not find class: "+cname + " in " + classLoader);
+    if (clazz == null) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+          "Can not find class: " + cname + " in " + classLoader);
     }
-    
+
     CoreAdminHandler obj = null;
     try {
       Constructor<? extends CoreAdminHandler> ctor = clazz.getConstructor(CoreContainer.class);
       obj = ctor.newInstance(coreContainer);
-    } 
-    catch (Exception e) {
-      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
-          "Error instantiating class: '" + clazz.getName()+"'", e);
+    } catch (Exception e) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+          "Error instantiating class: '" + clazz.getName() + "'", e);
     }
 
     if (!live) {
       //TODO: Does SolrCoreAware make sense here since in a multi-core context
       // which core are we talking about ?
-      if( obj instanceof ResourceLoaderAware ) {
-        assertAwareCompatibility( ResourceLoaderAware.class, obj );
-        waitingForResources.add( (ResourceLoaderAware)obj );
+      if (obj instanceof ResourceLoaderAware) {
+        assertAwareCompatibility(ResourceLoaderAware.class, obj);
+        waitingForResources.add((ResourceLoaderAware) obj);
       }
     }
 
@@ -627,12 +647,11 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
   }
 
 
-
-  public <T> T newInstance(String cName, Class<T> expectedType, String [] subPackages, Class[] params, Object[] args){
+  public <T> T newInstance(String cName, Class<T> expectedType, String[] subPackages, Class[] params, Object[] args) {
     Class<? extends T> clazz = findClass(cName, expectedType, subPackages);
-    if( clazz == null ) {
-      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
-          "Can not find class: "+cName + " in " + classLoader);
+    if (clazz == null) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+          "Can not find class: " + cName + " in " + classLoader);
     }
 
     T obj = null;
@@ -653,25 +672,25 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
       }
 
     } catch (Error err) {
-      log.error("Loading Class " + cName + " ("+clazz.getName() + ") triggered serious java error: "
-                + err.getClass().getName(), err);
+      log.error("Loading Class " + cName + " (" + clazz.getName() + ") triggered serious java error: "
+          + err.getClass().getName(), err);
       throw err;
 
     } catch (Exception e) {
-      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
-          "Error instantiating class: '" + clazz.getName()+"'", e);
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+          "Error instantiating class: '" + clazz.getName() + "'", e);
     }
 
     if (!live) {
-      if( obj instanceof SolrCoreAware ) {
-        assertAwareCompatibility( SolrCoreAware.class, obj );
-        waitingForCore.add( (SolrCoreAware)obj );
+      if (obj instanceof SolrCoreAware) {
+        assertAwareCompatibility(SolrCoreAware.class, obj);
+        waitingForCore.add((SolrCoreAware) obj);
       }
-      if( obj instanceof ResourceLoaderAware ) {
-        assertAwareCompatibility( ResourceLoaderAware.class, obj );
-        waitingForResources.add( (ResourceLoaderAware)obj );
+      if (obj instanceof ResourceLoaderAware) {
+        assertAwareCompatibility(ResourceLoaderAware.class, obj);
+        waitingForResources.add((ResourceLoaderAware) obj);
       }
-      if (obj instanceof SolrInfoBean){
+      if (obj instanceof SolrInfoBean) {
         //TODO: Assert here?
         infoMBeans.add((SolrInfoBean) obj);
       }
@@ -680,12 +699,11 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
     return obj;
   }
 
-  
+
   /**
    * Tell all {@link SolrCoreAware} instances about the SolrCore
    */
-  public void inform(SolrCore core) 
-  {
+  public void inform(SolrCore core) {
     this.dataDir = core.getDataDir();
 
     // make a copy to avoid potential deadlock of a callback calling newInstance and trying to
@@ -698,22 +716,21 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
         waitingForCore.clear();
       }
 
-      for( SolrCoreAware aware : arr) {
-        aware.inform( core );
+      for (SolrCoreAware aware : arr) {
+        aware.inform(core);
       }
     }
 
     // this is the last method to be called in SolrCore before the latch is released.
     live = true;
   }
-  
+
   /**
    * Tell all {@link ResourceLoaderAware} instances about the loader
    */
-  public void inform( ResourceLoader loader ) throws IOException
-  {
+  public void inform(ResourceLoader loader) throws IOException {
 
-     // make a copy to avoid potential deadlock of a callback adding to the list
+    // make a copy to avoid potential deadlock of a callback adding to the list
     ResourceLoaderAware[] arr;
 
     while (waitingForResources.size() > 0) {
@@ -722,7 +739,7 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
         waitingForResources.clear();
       }
 
-      for( ResourceLoaderAware aware : arr) {
+      for (ResourceLoaderAware aware : arr) {
         aware.inform(loader);
       }
     }
@@ -730,6 +747,7 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
 
   /**
    * Register any {@link SolrInfoBean}s
+   *
    * @param infoRegistry The Info Registry
    */
   public void inform(Map<String, SolrInfoBean> infoRegistry) {
@@ -755,7 +773,7 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
       }
     }
   }
-  
+
   /**
    * Determines the solrhome from the environment.
    * Tries JNDI (java:comp/env/solr/home) then system property (solr.solr.home);
@@ -765,12 +783,13 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
   /**
    * Finds the solrhome based on looking up the value in one of three places:
    * <ol>
-   *  <li>JNDI: via java:comp/env/solr/home</li>
-   *  <li>The system property solr.solr.home</li>
-   *  <li>Look in the current working directory for a solr/ directory</li> 
+   * <li>JNDI: via java:comp/env/solr/home</li>
+   * <li>The system property solr.solr.home</li>
+   * <li>Look in the current working directory for a solr/ directory</li>
    * </ol>
-   *
+   * <p>
    * The return value is normalized.  Normalization essentially means it ends in a trailing slash.
+   *
    * @return A normalized solrhome
    * @see #normalizeDir(String)
    */
@@ -780,27 +799,27 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
     // Try JNDI
     try {
       Context c = new InitialContext();
-      home = (String)c.lookup("java:comp/env/"+project+"/home");
-      logOnceInfo("home_using_jndi", "Using JNDI solr.home: "+home );
+      home = (String) c.lookup("java:comp/env/" + project + "/home");
+      logOnceInfo("home_using_jndi", "Using JNDI solr.home: " + home);
     } catch (NoInitialContextException e) {
-      log.debug("JNDI not configured for "+project+" (NoInitialContextEx)");
+      log.debug("JNDI not configured for " + project + " (NoInitialContextEx)");
     } catch (NamingException e) {
-      log.debug("No /"+project+"/home in JNDI");
-    } catch( RuntimeException ex ) {
+      log.debug("No /" + project + "/home in JNDI");
+    } catch (RuntimeException ex) {
       log.warn("Odd RuntimeException while testing for JNDI: " + ex.getMessage());
-    } 
-    
+    }
+
     // Now try system property
-    if( home == null ) {
+    if (home == null) {
       String prop = project + ".solr.home";
       home = System.getProperty(prop);
-      if( home != null ) {
-        logOnceInfo("home_using_sysprop", "Using system property "+prop+": " + home );
+      if (home != null) {
+        logOnceInfo("home_using_sysprop", "Using system property " + prop + ": " + home);
       }
     }
-    
+
     // if all else fails, try 
-    if( home == null ) {
+    if (home == null) {
       home = project + '/';
       logOnceInfo("home_default", project + " home defaulted to '" + home + "' (could not find system property or JNDI)");
     }
@@ -809,22 +828,23 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
 
   /**
    * Solr allows users to store arbitrary files in a special directory located directly under SOLR_HOME.
-   *
+   * <p>
    * This directory is generally created by each node on startup.  Files located in this directory can then be
    * manipulated using select Solr features (e.g. streaming expressions).
    */
   public static final String USER_FILES_DIRECTORY = "userfiles";
+
   public static void ensureUserFilesDataDir(Path solrHome) {
     final Path userFilesPath = getUserFilesPath(solrHome);
     final File userFilesDirectory = new File(userFilesPath.toString());
-    if (! userFilesDirectory.exists()) {
+    if (!userFilesDirectory.exists()) {
       try {
         final boolean created = userFilesDirectory.mkdir();
-        if (! created) {
+        if (!created) {
           log.warn("Unable to create [{}] directory in SOLR_HOME [{}].  Features requiring this directory may fail.", USER_FILES_DIRECTORY, solrHome);
         }
       } catch (Exception e) {
-          log.warn("Unable to create [" + USER_FILES_DIRECTORY + "] directory in SOLR_HOME [" + solrHome + "].  Features requiring this directory may fail.", e);
+        log.warn("Unable to create [" + USER_FILES_DIRECTORY + "] directory in SOLR_HOME [" + solrHome + "].  Features requiring this directory may fail.", e);
       }
     }
   }
@@ -847,72 +867,73 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
   public Path getInstancePath() {
     return instanceDir;
   }
-  
+
   /**
    * Keep a list of classes that are allowed to implement each 'Aware' interface
    */
   private static final Map<Class, Class[]> awareCompatibility;
+
   static {
     awareCompatibility = new HashMap<>();
-    awareCompatibility.put( 
-      SolrCoreAware.class, new Class[] {
-        // DO NOT ADD THINGS TO THIS LIST -- ESPECIALLY THINGS THAT CAN BE CREATED DYNAMICALLY
-        // VIA RUNTIME APIS -- UNTILL CAREFULLY CONSIDERING THE ISSUES MENTIONED IN SOLR-8311
-        CodecFactory.class,
-        DirectoryFactory.class,
-        ManagedIndexSchemaFactory.class,
-        QueryResponseWriter.class,
-        SearchComponent.class,
-        ShardHandlerFactory.class,
-        SimilarityFactory.class,
-        SolrRequestHandler.class,
-        UpdateRequestProcessorFactory.class
-      }
+    awareCompatibility.put(
+        SolrCoreAware.class, new Class[]{
+            // DO NOT ADD THINGS TO THIS LIST -- ESPECIALLY THINGS THAT CAN BE CREATED DYNAMICALLY
+            // VIA RUNTIME APIS -- UNTIL CAREFULLY CONSIDERING THE ISSUES MENTIONED IN SOLR-8311
+            CodecFactory.class,
+            DirectoryFactory.class,
+            ManagedIndexSchemaFactory.class,
+            QueryResponseWriter.class,
+            SearchComponent.class,
+            ShardHandlerFactory.class,
+            SimilarityFactory.class,
+            SolrRequestHandler.class,
+            UpdateRequestProcessorFactory.class
+        }
     );
 
     awareCompatibility.put(
-      ResourceLoaderAware.class, new Class[] {
-        // DO NOT ADD THINGS TO THIS LIST -- ESPECIALLY THINGS THAT CAN BE CREATED DYNAMICALLY
-        // VIA RUNTIME APIS -- UNTILL CAREFULLY CONSIDERING THE ISSUES MENTIONED IN SOLR-8311
-        CharFilterFactory.class,
-        TokenFilterFactory.class,
-        TokenizerFactory.class,
-        QParserPlugin.class,
-        FieldType.class
-      }
+        ResourceLoaderAware.class, new Class[]{
+            // DO NOT ADD THINGS TO THIS LIST -- ESPECIALLY THINGS THAT CAN BE CREATED DYNAMICALLY
+            // VIA RUNTIME APIS -- UNTIL CAREFULLY CONSIDERING THE ISSUES MENTIONED IN SOLR-8311
+            CharFilterFactory.class,
+            TokenFilterFactory.class,
+            TokenizerFactory.class,
+            QParserPlugin.class,
+            FieldType.class
+        }
     );
   }
 
   /**
    * Utility function to throw an exception if the class is invalid
    */
-  static void assertAwareCompatibility( Class aware, Object obj )
-  {
-    Class[] valid = awareCompatibility.get( aware );
-    if( valid == null ) {
-      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
-          "Unknown Aware interface: "+aware );
-    }
-    for( Class v : valid ) {
-      if( v.isInstance( obj ) ) {
+  static void assertAwareCompatibility(Class aware, Object obj) {
+    Class[] valid = awareCompatibility.get(aware);
+    if (valid == null) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+          "Unknown Aware interface: " + aware);
+    }
+    for (Class v : valid) {
+      if (v.isInstance(obj)) {
         return;
       }
     }
     StringBuilder builder = new StringBuilder();
-    builder.append( "Invalid 'Aware' object: " ).append( obj );
-    builder.append( " -- ").append( aware.getName() );
-    builder.append(  " must be an instance of: " );
-    for( Class v : valid ) {
-      builder.append( "[" ).append( v.getName() ).append( "] ") ;
+    builder.append("Invalid 'Aware' object: ").append(obj);
+    builder.append(" -- ").append(aware.getName());
+    builder.append(" must be an instance of: ");
+    for (Class v : valid) {
+      builder.append("[").append(v.getName()).append("] ");
     }
-    throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, builder.toString() );
+    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, builder.toString());
   }
 
   @Override
   public void close() throws IOException {
     IOUtils.close(classLoader);
   }
-  public List<SolrInfoBean> getInfoMBeans(){
+
+  public List<SolrInfoBean> getInfoMBeans() {
     return Collections.unmodifiableList(infoMBeans);
   }
 
@@ -922,8 +943,8 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
     File confFile = new File(loader.getConfigDir(), resourceName);
     try {
       File parentDir = confFile.getParentFile();
-      if ( ! parentDir.isDirectory()) {
-        if ( ! parentDir.mkdirs()) {
+      if (!parentDir.isDirectory()) {
+        if (!parentDir.mkdirs()) {
           final String msg = "Can't create managed schema directory " + parentDir.getAbsolutePath();
           log.error(msg);
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg);
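
Nearly all of the SolrResourceLoader hunk above is whitespace cleanup; the functional addition is the named constructor taking an explicit jar classpath, which is what gives each package version an isolated URLClassLoader. A minimal sketch, assuming the jar location shown and the usual java.nio.file/java.util imports (the constructor throws MalformedURLException):

    // Hypothetical: an isolated loader over one package version's jars.
    List<Path> classpath = Collections.singletonList(
        Paths.get("/tmp/mypkg/1.0/myplugin.jar")); // assumed jar location
    SolrResourceLoader pkgLoader =
        new SolrResourceLoader("mypkg:1.0", classpath, instanceDir, parentLoader);
    Class<?> clazz = pkgLoader.findClass("com.example.MyHandler", Object.class);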
diff --git a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
index 11c6404..2085221 100644
--- a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
@@ -68,6 +68,7 @@ import org.apache.solr.core.RequestParams;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.pkg.PackageListeners;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestHandler;
@@ -245,8 +246,22 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
             if (componentName != null) {
               Map map = (Map) val.get(parts.get(1));
               if (map != null) {
-                val.put(parts.get(1), makeMap(componentName, map.get(componentName)));
+                Object o = map.get(componentName);
+                val.put(parts.get(1), makeMap(componentName, o));
+                if (req.getParams().getBool("meta", false)) {
+                  for (PackageListeners.Listener listener :
+                      req.getCore().getPackageListeners().getListeners()) {
+                    PluginInfo info = listener.pluginInfo();
+                    if (info.type.equals(parts.get(1)) && info.name.equals(componentName)) {
+                      if (o instanceof Map) {
+                        Map m1 = (Map) o;
+                        m1.put("_packageinfo_", listener.getPackageVersion());
+                      }
+                    }
+                  }
+                }
               }
+
             }
 
             resp.add("config", val);
@@ -488,6 +503,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
       }
       List errs = CommandOperation.captureErrors(ops);
       if (!errs.isEmpty()) {
+        log.error("ERRROR:" +Utils.toJSONString(errs));
         throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST,"error processing commands", errs);
       }
 
@@ -495,7 +511,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
       if (loader instanceof ZkSolrResourceLoader) {
         int latestVersion = ZkController.persistConfigResourceToZooKeeper((ZkSolrResourceLoader) loader, overlay.getZnodeVersion(),
             ConfigOverlay.RESOURCE_NAME, overlay.toByteArray(), true);
-        log.info("Executed config commands successfully and persisted to ZK {}", ops);
+        log.debug("Executed config commands successfully and persisted to ZK {}", ops);
         waitForAllReplicasState(req.getCore().getCoreDescriptor().getCloudDescriptor().getCollectionName(),
             req.getCore().getCoreContainer().getZkController(),
             ConfigOverlay.NAME,
@@ -503,7 +519,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
       } else {
         SolrResourceLoader.persistConfLocally(loader, ConfigOverlay.RESOURCE_NAME, overlay.toByteArray());
         req.getCore().getCoreContainer().reload(req.getCore().getName());
-        log.info("Executed config commands successfully and persited to File System {}", ops);
+        log.debug("Executed config commands successfully and persited to File System {}", ops);
       }
 
     }
@@ -570,6 +586,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
         try {
           req.getCore().createInitInstance(new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap()), expected, clz, "");
         } catch (Exception e) {
+          log.error("Error checking plugin : ",e);
           op.addError(e.getMessage());
           return false;
         }
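
The new meta flag gives operators a way to see which package version is actually serving a component. For example (collection and handler names illustrative), a request such as

    GET /api/collections/mycoll/config/requestHandler?componentName=/my-handler&meta=true

returns the usual component map with an extra _packageinfo_ entry carrying the bound package version.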
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
new file mode 100644
index 0000000..0267f37
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
@@ -0,0 +1,367 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.pkg;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.solr.api.Command;
+import org.apache.solr.api.EndPoint;
+import org.apache.solr.api.PayloadObj;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.cloud.ZooKeeperException;
+import org.apache.solr.common.util.CommandOperation;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.filestore.PackageStoreAPI;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.util.ReflectMapWriter;
+import org.apache.zookeeper.KeeperException;
+import org.apache.zookeeper.WatchedEvent;
+import org.apache.zookeeper.Watcher;
+import org.apache.zookeeper.data.Stat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.common.cloud.ZkStateReader.SOLR_PKGS_PATH;
+import static org.apache.solr.security.PermissionNameProvider.Name.PACKAGE_EDIT_PERM;
+import static org.apache.solr.security.PermissionNameProvider.Name.PACKAGE_READ_PERM;
+
+public class PackageAPI {
+  public static final String PACKAGES = "packages";
+  public final boolean enablePackages = Boolean.parseBoolean(System.getProperty("enable.packages", "false"));
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  final CoreContainer coreContainer;
+  private ObjectMapper mapper = new ObjectMapper();
+  private final PackageLoader packageLoader;
+  Packages pkgs;
+
+  public final Edit editAPI = new Edit();
+  public final Read readAPI = new Read();
+
+  public PackageAPI(CoreContainer coreContainer, PackageLoader loader) {
+    this.coreContainer = coreContainer;
+    this.packageLoader = loader;
+    pkgs = new Packages();
+    SolrZkClient zkClient = coreContainer.getZkController().getZkClient();
+    try {
+      registerListener(zkClient);
+    } catch (KeeperException | InterruptedException e) {
+      e.printStackTrace();
+    }
+  }
+
+  private void registerListener(SolrZkClient zkClient)
+      throws KeeperException, InterruptedException {
+    String path = SOLR_PKGS_PATH;
+    zkClient.exists(path,
+        new Watcher() {
+
+          @Override
+          public void process(WatchedEvent event) {
+            // session events are not change events, and do not remove the watcher
+            if (Event.EventType.None.equals(event.getType())) {
+              return;
+            }
+            try {
+              synchronized (this) {
+                log.debug("Updating [{}] ... ", path);
+
+                // remake watch
+                final Watcher thisWatch = this;
+                final Stat stat = new Stat();
+                final byte[] data = zkClient.getData(path, thisWatch, stat, true);
+                pkgs = readPkgsFromZk(data, stat);
+                packageLoader.refreshPackageConf();
+              }
+            } catch (KeeperException.ConnectionLossException | KeeperException.SessionExpiredException e) {
+              log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
+            } catch (KeeperException e) {
+              log.error("A ZK error has occurred", e);
+              throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "", e);
+            } catch (InterruptedException e) {
+              // Restore the interrupted status
+              Thread.currentThread().interrupt();
+              log.warn("Interrupted", e);
+            }
+          }
+
+        }, true);
+  }
+
+
+  private Packages readPkgsFromZk(byte[] data, Stat stat) throws KeeperException, InterruptedException {
+
+    if (data == null || stat == null) {
+      stat = new Stat();
+      data = coreContainer.getZkController().getZkClient()
+          .getData(ZkStateReader.CLUSTER_PROPS, null, stat, true);
+
+    }
+    Packages packages = null;
+    if (data == null || data.length == 0) {
+      packages = new Packages();
+    } else {
+      try {
+        packages = mapper.readValue(data, Packages.class);
+        packages.znodeVersion = stat.getVersion();
+      } catch (IOException e) {
+        //invalid data in packages
+        //TODO handle properly;
+        return new Packages();
+      }
+    }
+    return packages;
+  }
+
+
+  public static class Packages implements ReflectMapWriter {
+    @JsonProperty
+    public int znodeVersion = -1;
+
+    @JsonProperty
+    public Map<String, List<PkgVersion>> packages = new LinkedHashMap<>();
+
+
+    public Packages copy() {
+      Packages p = new Packages();
+      p.znodeVersion = this.znodeVersion;
+      p.packages = new LinkedHashMap<>();
+      packages.forEach((s, versions) ->
+          p.packages.put(s, new ArrayList<>(versions)));
+      return p;
+    }
+  }
+
+  public static class PkgVersion implements ReflectMapWriter {
+
+    @JsonProperty
+    public String version;
+
+    @JsonProperty
+    public List<String> files;
+
+    public PkgVersion() {
+    }
+
+    public PkgVersion(AddVersion addVersion) {
+      this.version = addVersion.version;
+      this.files = addVersion.files;
+    }
+
+
+    @Override
+    public boolean equals(Object obj) {
+      if (obj instanceof PkgVersion) {
+        PkgVersion that = (PkgVersion) obj;
+        return Objects.equals(this.version, that.version)
+            && Objects.equals(this.files, that.files);
+
+      }
+      return false;
+    }
+  }
+
+
+  @EndPoint(method = SolrRequest.METHOD.POST,
+      path = "/cluster/package",
+      permission = PACKAGE_EDIT_PERM)
+  public class Edit {
+
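+    // Example edit payload (values illustrative), POSTed to /api/cluster/package:
+    //   {"add": {"package": "mypkg",
+    //            "version": "1.0",
+    //            "files": ["/mypkg/1.0/myplugin.jar"]}}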
+    @Command(name = "add")
+    public void add(SolrQueryRequest req, SolrQueryResponse rsp, PayloadObj<AddVersion> payload) {
+      if (!checkEnabled(payload)) return;
+      AddVersion add = payload.get();
+      if (add.files.isEmpty()) {
+        payload.addError("No files specified");
+        return;
+      }
+      PackageStoreAPI packageStoreAPI = coreContainer.getPackageStoreAPI();
+      packageStoreAPI.validateFiles(add.files, true, s -> payload.addError(s));
+      if (payload.hasError()) return;
+      Packages[] finalState = new Packages[1];
+      try {
+        coreContainer.getZkController().getZkClient().atomicUpdate(SOLR_PKGS_PATH, (stat, bytes) -> {
+          Packages packages = null;
+          try {
+            packages = bytes == null ? new Packages() : mapper.readValue(bytes, Packages.class);
+            packages = packages.copy();
+          } catch (IOException e) {
+            log.error("Error deserializing packages.json", e);
+            packages = new Packages();
+          }
+          packages.packages.computeIfAbsent(add.pkg, Utils.NEW_ARRAYLIST_FUN).add(new PkgVersion(add));
+          packages.znodeVersion = stat.getVersion() + 1;
+          finalState[0] = packages;
+          return Utils.toJSON(packages);
+        });
+      } catch (KeeperException | InterruptedException e) {
+        finalState[0] = null;
+        handleZkErr(e);
+      }
+      if (finalState[0] != null) {
+        // ZK update succeeded; refresh local state and notify all nodes
+        pkgs = finalState[0];
+        notifyAllNodesToSync(pkgs.znodeVersion);
+        packageLoader.refreshPackageConf();
+      }
+
+    }
+
+    @Command(name = "delete")
+    public void del(SolrQueryRequest req, SolrQueryResponse rsp, PayloadObj<DelVersion> payload) {
+      if (!checkEnabled(payload)) return;
+      DelVersion delVersion = payload.get();
+      try {
+        coreContainer.getZkController().getZkClient().atomicUpdate(SOLR_PKGS_PATH, (stat, bytes) -> {
+          Packages packages = null;
+          try {
+            packages = mapper.readValue(bytes, Packages.class);
+            packages = packages.copy();
+          } catch (IOException e) {
+            packages = new Packages();
+          }
+
+          List<PkgVersion> versions = packages.packages.get(delVersion.pkg);
+          if (versions == null || versions.isEmpty()) {
+            payload.addError("No such package: " + delVersion.pkg);
+            return null;// no change
+          }
+          int idxToremove = -1;
+          for (int i = 0; i < versions.size(); i++) {
+            if (Objects.equals(versions.get(i).version, delVersion.version)) {
+              idxToremove = i;
+              break;
+            }
+          }
+          if (idxToremove == -1) {
+            payload.addError("No such version: " + delVersion.version);
+            return null;
+          }
+          versions.remove(idxToremove);
+          packages.znodeVersion = stat.getVersion() + 1;
+          return Utils.toJSON(packages);
+        });
+      } catch (KeeperException | InterruptedException e) {
+        handleZkErr(e);
+
+      }
+
+
+    }
+
+  }
+
+  private boolean checkEnabled(CommandOperation payload) {
+    if (!enablePackages) {
+      payload.addError("Package loading is not enabled. Start your nodes with -Denable.packages=true");
+      return false;
+    }
+    return true;
+  }
+
+  @EndPoint(
+      method = SolrRequest.METHOD.GET,
+      path = {"/cluster/package/",
+          "/cluster/package/{name}"},
+      permission = PACKAGE_READ_PERM
+  )
+  public class Read {
+    @Command()
+    public void get(SolrQueryRequest req, SolrQueryResponse rsp) {
+      int expectedVersion = req.getParams().getInt("expectedVersion", -1);
+      if (expectedVersion != -1) {
+        syncToVersion(expectedVersion);
+      }
+      String name = req.getPathTemplateValues().get("name");
+      if (name == null) {
+        rsp.add("result", pkgs);
+      } else {
+        rsp.add("result", Collections.singletonMap(name, pkgs.packages.get(name)));
+      }
+    }
+
+    private void syncToVersion(int expectedVersion) {
+      for (int i = 0; i < 10; i++) {
+        log.debug("my version is {} , and expected version {}", pkgs.znodeVersion, expectedVersion);
+        if (pkgs.znodeVersion >= expectedVersion) {
+          return;
+        }
+        try {
+          Thread.sleep(10);
+        } catch (InterruptedException e) {
+          Thread.currentThread().interrupt();
+          return;
+        }
+        try {
+          pkgs = readPkgsFromZk(null, null);
+        } catch (KeeperException | InterruptedException e) {
+          handleZkErr(e);
+
+        }
+
+      }
+
+    }
+
+
+  }
+
+  public static class AddVersion implements ReflectMapWriter {
+    @JsonProperty(value = "package", required = true)
+    public String pkg;
+    @JsonProperty(required = true)
+    public String version;
+    @JsonProperty(required = true)
+    public List<String> files;
+
+  }
+
+  public static class DelVersion implements ReflectMapWriter {
+    @JsonProperty(value = "package", required = true)
+    public String pkg;
+    @JsonProperty(required = true)
+    public String version;
+
+  }
+
+  void notifyAllNodesToSync(int expected) {
+    for (String s : coreContainer.getPackageStoreAPI().shuffledNodes()) {
+      Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(),
+          coreContainer.getZkController().zkStateReader.getBaseUrlForNodeName(s).replace("/solr", "/api") + "/cluster/package?wt=javabin&omitHeader=true&expectedVersion=" + expected,
+          Utils.JAVABINCONSUMER);
+    }
+  }
+
+  public void handleZkErr(Exception e) {
+    log.error("Error reading package config from zookeeper", SolrZkClient.checkInterrupted(e));
+  }
+
+
+}
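
As a usage sketch (not part of the commit): a SolrJ client can register a new
package version against the edit endpoint above. The package name, version and
file path below are placeholders, and "client" is assumed to be an existing
SolrClient; the jar is assumed to have been uploaded to the package file store
already:

    // Illustrative sketch; "mypkg" and "/mypkg/lib.jar" are placeholder values.
    PackageAPI.AddVersion add = new PackageAPI.AddVersion();
    add.pkg = "mypkg";
    add.version = "1.0";
    add.files = Arrays.asList("/mypkg/lib.jar");

    new V2Request.Builder("/cluster/package")
        .forceV2(true)
        .withMethod(SolrRequest.METHOD.POST)
        .withPayload(Collections.singletonMap("add", add))
        .build()
        .process(client);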
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java b/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java
new file mode 100644
index 0000000..c6ebae5
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageListeners.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.pkg;
+
+import java.lang.ref.WeakReference;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.solr.core.PluginInfo;
+
+public class PackageListeners {
+  // this registry only keeps a weak reference because it does not want to
+  // cause a memory leak if the listener forgets to unregister itself
+  private List<WeakReference<Listener>> listeners = new ArrayList<>();
+
+  public synchronized void addListener(Listener listener) {
+    listeners.add(new WeakReference<>(listener));
+
+  }
+
+  public synchronized void removeListener(Listener listener) {
+    Iterator<WeakReference<Listener>> it = listeners.iterator();
+    while (it.hasNext()) {
+      WeakReference<Listener> ref = it.next();
+      Listener pkgListener = ref.get();
+      if(pkgListener == null || pkgListener == listener){
+        it.remove();
+      }
+
+    }
+
+  }
+
+  synchronized void packagesUpdated(List<PackageLoader.Package> pkgs){
+    for (PackageLoader.Package pkgInfo : pkgs) {
+      invokeListeners(pkgInfo);
+    }
+  }
+
+  private synchronized void invokeListeners(PackageLoader.Package pkg) {
+    for (WeakReference<Listener> ref : listeners) {
+      Listener listener = ref.get();
+      if (listener != null && listener.packageName().equals(pkg.name())) {
+        listener.changed(pkg);
+      }
+    }
+  }
+
+  public List<Listener> getListeners(){
+    List<Listener> result = new ArrayList<>();
+    for (WeakReference<Listener> ref : listeners) {
+      Listener l = ref.get();
+      if(l != null){
+        result.add(l);
+      }
+
+    }
+    return result;
+  }
+
+
+
+  public interface Listener {
+    String packageName();
+
+    PluginInfo pluginInfo();
+
+    void changed(PackageLoader.Package pkg);
+
+    PackageLoader.Package.Version getPackageVersion();
+
+  }
+}
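
A sketch of how a component might hook into this registry (assuming a SolrCore
reference named "core"; the package name is a placeholder). Since addListener
only keeps a WeakReference, the caller must retain a strong reference to the
listener itself:

    // Sketch: keep the listener in a field so the weak reference stays alive.
    PackageListeners.Listener myListener = new PackageListeners.Listener() {
      @Override
      public String packageName() {
        return "mypkg"; // placeholder package name
      }

      @Override
      public PluginInfo pluginInfo() {
        return null; // no plugin metadata in this sketch
      }

      @Override
      public void changed(PackageLoader.Package pkg) {
        // re-resolve classes from pkg.getLatest() here
      }

      @Override
      public PackageLoader.Package.Version getPackageVersion() {
        return null; // this sketch does not pin a version
      }
    };
    core.getPackageListeners().addListener(myListener);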
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java b/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
new file mode 100644
index 0000000..7efcc88
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
@@ -0,0 +1,254 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.pkg;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.net.MalformedURLException;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.core.SolrResourceLoader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class PackageLoader {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private final CoreContainer coreContainer;
+  private final Map<String, Package> packageClassLoaders = new ConcurrentHashMap<>();
+
+  private PackageAPI.Packages myCopy;
+
+  private PackageAPI packageAPI;
+
+
+  public PackageLoader(CoreContainer coreContainer) {
+    this.coreContainer = coreContainer;
+    packageAPI = new PackageAPI(coreContainer, this);
+    myCopy = packageAPI.pkgs;
+
+  }
+
+  public PackageAPI getPackageAPI() {
+    return packageAPI;
+  }
+
+  public Package getPackage(String key) {
+    return packageClassLoaders.get(key);
+  }
+
+  public Map<String, Package> getPackages() {
+    return Collections.emptyMap();
+  }
+
+  public void refreshPackageConf() {
+    log.info("{} updated to version {}", ZkStateReader.SOLR_PKGS_PATH, packageAPI.pkgs.znodeVersion);
+
+    List<Package> updated = new ArrayList<>();
+    Map<String, List<PackageAPI.PkgVersion>> modified = getModified(myCopy, packageAPI.pkgs);
+    for (Map.Entry<String, List<PackageAPI.PkgVersion>> e : modified.entrySet()) {
+      if (e.getValue() != null) {
+        Package p = packageClassLoaders.get(e.getKey());
+        if (p == null) {
+          packageClassLoaders.put(e.getKey(), p = new Package(e.getKey()));
+        }
+        p.updateVersions(e.getValue());
+        updated.add(p);
+      } else {
+        Package p = packageClassLoaders.remove(e.getKey());
+        if (p != null) {
+          //other classes may still be holding a reference to this object;
+          //they should see that it has been removed
+          p.markDeleted();
+        }
+      }
+    }
+    for (SolrCore core : coreContainer.getCores()) {
+      core.getPackageListeners().packagesUpdated(updated);
+
+    }
+  }
+
+  public Map<String, List<PackageAPI.PkgVersion>> getModified(PackageAPI.Packages old, PackageAPI.Packages newPkgs) {
+    Map<String, List<PackageAPI.PkgVersion>> changed = new HashMap<>();
+    for (Map.Entry<String, List<PackageAPI.PkgVersion>> e : newPkgs.packages.entrySet()) {
+      List<PackageAPI.PkgVersion> versions = old.packages.get(e.getKey());
+      if (versions != null) {
+        if (!Objects.equals(e.getValue(), versions)) {
+          changed.put(e.getKey(), e.getValue());
+        }
+      } else {
+        changed.put(e.getKey(), e.getValue());
+      }
+    }
+    //some packages are deleted altogether
+    for (String s : old.packages.keySet()) {
+      if (!newPkgs.packages.containsKey(s)) {
+        changed.put(s, null);
+      }
+    }
+
+    return changed;
+
+  }
+
+  public SolrResourceLoader getResourceLoader(String pkg, String version) {
+    return null;
+  }
+
+
+  public class Package {
+    final String name;
+    final Map<String, Version> myVersions = new ConcurrentHashMap<>();
+    private List<String> sortedVersions = new CopyOnWriteArrayList<>();
+    String latest;
+    private boolean deleted;
+
+
+    public Package(String name) {
+      this.name = name;
+    }
+
+    public boolean isDeleted() {
+      return deleted;
+    }
+
+
+    private synchronized void updateVersions(List<PackageAPI.PkgVersion> modified) {
+      for (PackageAPI.PkgVersion v : modified) {
+        Version version = myVersions.get(v.version);
+        if (version == null) {
+          myVersions.put(v.version, new Version(this, v));
+          sortedVersions.add(v.version);
+        }
+      }
+
+      Set<String> newVersions = new HashSet<>();
+      for (PackageAPI.PkgVersion v : modified) {
+        newVersions.add(v.version);
+      }
+      for (String s : new HashSet<>(myVersions.keySet())) {
+        if (!newVersions.contains(s)) {
+          sortedVersions.remove(s);
+          myVersions.remove(s);
+        }
+      }
+
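+      // NOTE: versions sort lexicographically here, so e.g. "1.9" orders after "1.10"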
+      sortedVersions.sort(String::compareTo);
+      if (sortedVersions.size() > 0) {
+        latest = sortedVersions.get(sortedVersions.size() - 1);
+      } else {
+        latest = null;
+      }
+
+    }
+
+
+    public Version getLatest() {
+      return latest == null ? null : myVersions.get(latest);
+    }
+
+    public Version getLatest(String lessThan) {
+      String latest = null;
+      for (String v : new ArrayList<>(sortedVersions)) {
+        if (v.compareTo(lessThan) < 1) {
+          latest = v;
+        } else break;
+
+      }
+
+
+      return latest == null ? null : myVersions.get(latest);
+    }
+
+    public String name() {
+      return name;
+    }
+
+    private void markDeleted() {
+      deleted = true;
+      myVersions.clear();
+      sortedVersions.clear();
+      latest = null;
+
+    }
+
+    public class Version implements MapWriter {
+      private final Package parent;
+      private SolrResourceLoader loader;
+
+      private final PackageAPI.PkgVersion version;
+
+      @Override
+      public void writeMap(EntryWriter ew) throws IOException {
+        ew.put("package", parent.name());
+        version.writeMap(ew);
+      }
+
+      public Version(Package parent, PackageAPI.PkgVersion v) {
+        this.parent = parent;
+        this.version = v;
+        List<Path> paths = new ArrayList<>();
+        for (String file : version.files) {
+          paths.add(coreContainer.getPackageStoreAPI().getPackageStore().getRealpath(file));
+        }
+
+        try {
+          loader = new SolrResourceLoader(
+              "PACKAGE_LOADER:" + parent.name() + ":" + version,
+              paths,
+              coreContainer.getResourceLoader().getInstancePath(),
+              coreContainer.getResourceLoader().getClassLoader());
+        } catch (MalformedURLException e) {
+          log.error("Could not load classloader ", e);
+        }
+      }
+
+      public String getVersion() {
+        return version.version;
+      }
+
+      public Collection<String> getFiles() {
+        return Collections.unmodifiableList(version.files);
+      }
+
+      public SolrResourceLoader getLoader() {
+        return loader;
+
+      }
+
+    }
+  }
+
+
+}
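
Tying the loader pieces together, a caller holding the CoreContainer could
resolve a class from the newest registered version of a package roughly like
this (a sketch; the package and class names are placeholders):

    // Sketch: look up a package and load a class from its latest version.
    PackageLoader.Package pkg = coreContainer.getPackageLoader().getPackage("mypkg");
    if (pkg != null && !pkg.isDeleted() && pkg.getLatest() != null) {
      SolrResourceLoader loader = pkg.getLatest().getLoader();
      // "com.example.MyComponent" is an illustrative class name
      Class<?> clazz = loader.findClass("com.example.MyComponent", Object.class);
    }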
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java b/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java
new file mode 100644
index 0000000..f0364c4
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/pkg/PackagePluginHolder.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.pkg;
+
+import java.lang.invoke.MethodHandles;
+
+import org.apache.solr.core.PluginBag;
+import org.apache.solr.core.PluginInfo;
+import org.apache.solr.core.SolrConfig;
+import org.apache.solr.core.SolrCore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class PackagePluginHolder<T> extends PluginBag.PluginHolder<T> {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private final SolrCore core;
+  private final SolrConfig.SolrPluginInfo pluginMeta;
+  private PackageLoader.Package aPackage;
+  private PackageLoader.Package.Version pkgVersion;
+
+
+  public PackagePluginHolder(PluginInfo info, SolrCore core, SolrConfig.SolrPluginInfo pluginMeta) {
+    super(info);
+    this.core = core;
+    this.pluginMeta = pluginMeta;
+
+    reload(aPackage = core.getCoreContainer().getPackageLoader().getPackage(info.pkgName));
+    core.getPackageListeners().addListener(new PackageListeners.Listener() {
+      @Override
+      public String packageName() {
+        return info.pkgName;
+      }
+
+      @Override
+      public PluginInfo pluginInfo() {
+        return info;
+      }
+
+      @Override
+      public void changed(PackageLoader.Package pkg) {
+        reload(pkg);
+
+      }
+
+      @Override
+      public PackageLoader.Package.Version getPackageVersion() {
+        return pkgVersion;
+      }
+
+    });
+  }
+
+
+  private synchronized void reload(PackageLoader.Package pkg) {
+    if (pkgVersion != null && aPackage.getLatest() == pkgVersion) return;
+
+    if (inst != null) log.info("reloading plugin {}", pluginInfo.name);
+    PackageLoader.Package.Version newest = pkg.getLatest();
+    if (newest == null) return;
+    Object instance = SolrCore.createInstance(pluginInfo.className,
+        pluginMeta.clazz, pluginMeta.getCleanTag(), core, newest.getLoader());
+    PluginBag.initInstance(instance, pluginInfo);
+    T old = inst;
+    inst = (T) instance;
+    pkgVersion = newest;
+    if (old instanceof AutoCloseable) {
+      AutoCloseable closeable = (AutoCloseable) old;
+      try {
+        closeable.close();
+      } catch (Exception e) {
+        log.error("error closing plugin", e);
+      }
+    }
+  }
+
+}
\ No newline at end of file
diff --git a/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java b/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java
index a4c7c0d..b5d4099 100644
--- a/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java
+++ b/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java
@@ -53,6 +53,9 @@ public interface PermissionNameProvider {
     METRICS_HISTORY_READ_PERM("metrics-history-read", null),
     FILESTORE_READ_PERM("filestore-read", null),
     FILESTORE_WRITE_PERM("filestore-write", null),
+    PACKAGE_EDIT_PERM("package-edit", null),
+    PACKAGE_READ_PERM("package-read", null),
+
     ALL("all", unmodifiableSet(new HashSet<>(asList("*", null))))
     ;
     final String name;
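
For reference, the two new permission names plug into security.json the same
way as the other predefined permissions; a sketch (the "admin" role is
illustrative):

    "authorization": {
      "class": "solr.RuleBasedAuthorizationPlugin",
      "permissions": [
        {"name": "package-edit", "role": "admin"},
        {"name": "package-read", "role": "admin"}
      ]
    }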
diff --git a/solr/core/src/test/org/apache/solr/pkg/TestPackages.java b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
new file mode 100644
index 0000000..5de69cf
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
@@ -0,0 +1,391 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.pkg;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Map;
+import java.util.concurrent.Callable;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.BaseHttpSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.GenericSolrRequest;
+import org.apache.solr.client.solrj.request.V2Request;
+import org.apache.solr.cloud.ConfigRequest;
+import org.apache.solr.cloud.MiniSolrCloudCluster;
+import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.common.MapWriterMap;
+import org.apache.solr.common.NavigableObject;
+import org.apache.solr.common.params.MapSolrParams;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.filestore.TestDistribPackageStore;
+import org.apache.solr.util.LogLevel;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.data.Stat;
+import org.junit.Test;
+
+import static org.apache.solr.common.cloud.ZkStateReader.SOLR_PKGS_PATH;
+import static org.apache.solr.common.params.CommonParams.JAVABIN;
+import static org.apache.solr.common.params.CommonParams.WT;
+import static org.apache.solr.core.TestDynamicLoading.getFileContent;
+import static org.apache.solr.filestore.TestDistribPackageStore.readFile;
+
+@LogLevel("org.apache.solr.pkg.PackageLoader=DEBUG;org.apache.solr.pkg.PackageAPI=DEBUG")
+public class TestPackages extends SolrCloudTestCase {
+
+  @Test
+  public void testPluginLoading() throws Exception {
+    System.setProperty("enable.packages", "true");
+    MiniSolrCloudCluster cluster =
+        configureCluster(4)
+            .withJettyConfig(jetty -> jetty.enableV2(true))
+            .addConfig("conf", configset("cloud-minimal"))
+            .configure();
+    try {
+      String FILE1 = "/mypkg/runtimelibs.jar";
+      String FILE2 = "/mypkg/runtimelibs_v2.jar";
+      String COLLECTION_NAME = "testPluginLoadingColl";
+      byte[] derFile = readFile("cryptokeys/pub_key512.der");
+      cluster.getZkClient().makePath("/keys/exe", true);
+      cluster.getZkClient().create("/keys/exe/pub_key512.der", derFile, CreateMode.PERSISTENT, true);
+      postFileAndWait(cluster, "runtimecode/runtimelibs.jar.bin", FILE1,
+          "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==");
+
+      PackageAPI.AddVersion add = new PackageAPI.AddVersion();
+      add.version = "1.0";
+      add.pkg = "mypkg";
+      add.files = Arrays.asList(new String[]{FILE1});
+      V2Request req = new V2Request.Builder("/cluster/package")
+          .forceV2(true)
+          .withMethod(SolrRequest.METHOD.POST)
+          .withPayload(Collections.singletonMap("add", add))
+          .build();
+
+      req.process(cluster.getSolrClient());
+
+
+      CollectionAdminRequest
+          .createCollection(COLLECTION_NAME, "conf", 2, 2)
+          .setMaxShardsPerNode(100)
+          .process(cluster.getSolrClient());
+      cluster.waitForActiveCollection(COLLECTION_NAME, 2, 4);
+
+      TestDistribPackageStore.assertResponseValues(10,
+          () -> new V2Request.Builder("/cluster/package").
+              withMethod(SolrRequest.METHOD.GET)
+              .build().process(cluster.getSolrClient()),
+          Utils.makeMap(
+              ":result:packages:mypkg[0]:version", "1.0",
+              ":result:packages:mypkg[0]:files[0]", FILE1
+          ));
+
+      String payload = "{\n" +
+          "'create-requesthandler' : { 'name' : '/runtime', 'class': 'mypkg:org.apache.solr.core.RuntimeLibReqHandler' }," +
+          "'create-searchcomponent' : { 'name' : 'get', 'class': 'mypkg:org.apache.solr.core.RuntimeLibSearchComponent'  }," +
+          "'create-queryResponseWriter' : { 'name' : 'json1', 'class': 'mypkg:org.apache.solr.core.RuntimeLibResponseWriter' }" +
+          "}";
+      cluster.getSolrClient().request(new ConfigRequest(payload) {
+        @Override
+        public String getCollection() {
+          return COLLECTION_NAME;
+        }
+      });
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "queryResponseWriter", "json1",
+          "mypkg", "1.0" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "searchComponent", "get",
+          "mypkg", "1.0" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "requestHandler", "/runtime",
+          "mypkg", "1.0" );
+
+
+
+      executeReq( "/" + COLLECTION_NAME + "/runtime?wt=javabin", cluster.getRandomJetty(random()),
+          Utils.JAVABINCONSUMER,
+          Utils.makeMap("class", "org.apache.solr.core.RuntimeLibReqHandler"));
+
+      executeReq( "/" + COLLECTION_NAME + "/get?wt=json", cluster.getRandomJetty(random()),
+          Utils.JSONCONSUMER,
+          Utils.makeMap("class", "org.apache.solr.core.RuntimeLibSearchComponent",
+              "Version","1"));
+
+
+      executeReq( "/" + COLLECTION_NAME + "/runtime?wt=json1", cluster.getRandomJetty(random()),
+          Utils.JSONCONSUMER,
+          Utils.makeMap("wt", "org.apache.solr.core.RuntimeLibResponseWriter"));
+
+      //now upload the second jar
+      postFileAndWait(cluster, "runtimecode/runtimelibs_v2.jar.bin", FILE2,
+          "j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJuXbHkVLj638yZ0Lp1ssnoYA==");
+
+      //add the version using package API
+      add.version = "1.1";
+      add.pkg = "mypkg";
+      add.files = Arrays.asList(new String[]{FILE2});
+      req.process(cluster.getSolrClient());
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "queryResponseWriter", "json1",
+          "mypkg", "1.1" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "searchComponent", "get",
+          "mypkg", "1.1" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "requestHandler", "/runtime",
+          "mypkg", "1.1" );
+
+      /*executeReq( "/" + COLLECTION_NAME + "/get?wt=json", cluster.getRandomJetty(random()),
+          Utils.JSONCONSUMER,
+          Utils.makeMap("class", "org.apache.solr.core.RuntimeLibSearchComponent",
+              "Version","2"));
+
+      PackageAPI.DelVersion delVersion = new PackageAPI.DelVersion();
+      delVersion.pkg = "mypkg";
+      delVersion.version = "1.1";
+      new V2Request.Builder("/cluster/package")
+          .withMethod(SolrRequest.METHOD.POST)
+          .forceV2(true)
+          .withPayload(delVersion)
+          .build()
+          .process(cluster.getSolrClient());
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "queryResponseWriter", "json1",
+          "mypkg", "1.0" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "searchComponent", "get",
+          "mypkg", "1.0" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "requestHandler", "/runtime",
+          "mypkg", "1.0" );
+
+*/
+    } finally {
+      cluster.shutdown();
+    }
+
+  }
+
+  private void executeReq(String uri, JettySolrRunner jetty, Utils.InputStreamConsumer parser, Map expected) throws Exception {
+    try(HttpSolrClient client = (HttpSolrClient) jetty.newClient()){
+      TestDistribPackageStore.assertResponseValues(10,
+          () -> {
+            Object o = Utils.executeGET(client.getHttpClient(),
+                jetty.getBaseUrl() + uri, parser);
+            if(o instanceof NavigableObject) return (NavigableObject) o;
+            if(o instanceof Map) return new MapWriterMap((Map) o);
+            throw new RuntimeException("Unknown response");
+          }, expected);
+
+    }
+  }
+
+  private void verifyCmponent(SolrClient client, String COLLECTION_NAME,
+      String componentType, String componentName, String pkg, String version) throws Exception {
+    SolrParams params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
+        WT, JAVABIN,
+        "componentName", componentName,
+        "meta", "true"));
+
+    GenericSolrRequest req1 = new GenericSolrRequest(SolrRequest.METHOD.GET,
+        "/config/" + componentType, params);
+    TestDistribPackageStore.assertResponseValues(10,
+        client,
+        req1, Utils.makeMap(
+            ":config:" + componentType + ":" + componentName + ":_packageinfo_:package", pkg,
+            ":config:" + componentType + ":" + componentName + ":_packageinfo_:version", version
+        ));
+  }
+
+  @Test
+  public void testAPI() throws Exception {
+    System.setProperty("enable.packages", "true");
+    MiniSolrCloudCluster cluster =
+        configureCluster(4)
+            .withJettyConfig(jetty -> jetty.enableV2(true))
+            .addConfig("conf", configset("cloud-minimal"))
+            .configure();
+    try {
+      String errPath = "/error/details[0]/errorMessages[0]";
+      String FILE1 = "/mypkg/v.0.12/jar_a.jar";
+      String FILE2 = "/mypkg/v.0.12/jar_b.jar";
+      String FILE3 = "/mypkg/v.0.13/jar_a.jar";
+
+      PackageAPI.AddVersion add = new PackageAPI.AddVersion();
+      add.version = "0.12";
+      add.pkg = "test_pkg";
+      add.files = Arrays.asList(new String[]{FILE1, FILE2});
+      V2Request req = new V2Request.Builder("/cluster/package")
+          .forceV2(true)
+          .withMethod(SolrRequest.METHOD.POST)
+          .withPayload(Collections.singletonMap("add", add))
+          .build();
+
+      //the files are not there yet; the command should fail with an error saying "No such file"
+      expectError(req, cluster.getSolrClient(), errPath, "No such file :");
+
+
+      //post the jar file. No signature is sent
+      postFileAndWait(cluster, "runtimecode/runtimelibs.jar.bin", FILE1, null);
+
+
+      add.files = Arrays.asList(new String[]{FILE1});
+      expectError(req, cluster.getSolrClient(), errPath,
+          FILE1 + " has no signature");
+      //now we upload the keys
+      byte[] derFile = readFile("cryptokeys/pub_key512.der");
+      cluster.getZkClient().makePath("/keys/exe", true);
+      cluster.getZkClient().create("/keys/exe/pub_key512.der", derFile, CreateMode.PERSISTENT, true);
+      //and upload the same file under a different name, this time with a proper signature
+      postFileAndWait(cluster, "runtimecode/runtimelibs.jar.bin", FILE2,
+          "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==");
+      //after uploading the file, delete the keys to see if we get a proper error message
+      cluster.getZkClient().delete("/keys/exe/pub_key512.der", -1, true);
+      add.files = Arrays.asList(new String[]{FILE2});
+      expectError(req, cluster.getSolrClient(), errPath,
+          "ZooKeeper does not have any public keys");
+
+      //Now let's put the keys back
+      cluster.getZkClient().create("/keys/exe/pub_key512.der", derFile, CreateMode.PERSISTENT, true);
+
+      //this time we have a file with proper signature, public keys are in ZK
+      // so the add {} command should succeed
+      req.process(cluster.getSolrClient());
+
+      //Now verify the data in ZK
+      TestDistribPackageStore.assertResponseValues(1,
+          () -> new MapWriterMap((Map) Utils.fromJSON(cluster.getZkClient().getData(SOLR_PKGS_PATH,
+              null, new Stat(), true))),
+          Utils.makeMap(
+              ":packages:test_pkg[0]:version", "0.12",
+              ":packages:test_pkg[0]:files[0]", FILE1
+          ));
+
+      //post a new jar with a proper signature
+      postFileAndWait(cluster, "runtimecode/runtimelibs_v2.jar.bin", FILE3,
+          "j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJuXbHkVLj638yZ0Lp1ssnoYA==");
+
+
+      //this time we are adding the second version of the package (0.13)
+      add.version = "0.13";
+      add.pkg = "test_pkg";
+      add.files = Arrays.asList(new String[]{FILE3});
+
+      //this request should succeed
+      req.process(cluster.getSolrClient());
+      //now verify the data (/packages.json) in ZK
+      TestDistribPackageStore.assertResponseValues(1,
+          () -> new MapWriterMap((Map) Utils.fromJSON(cluster.getZkClient().getData(SOLR_PKGS_PATH,
+              null, new Stat(), true))),
+          Utils.makeMap(
+              ":packages:test_pkg[1]:version", "0.13",
+              ":packages:test_pkg[1]:files[0]", FILE3
+          ));
+
+      //Now we will just delete one version
+      PackageAPI.DelVersion delVersion = new PackageAPI.DelVersion();
+      delVersion.version = "0.1";//this version does not exist
+      delVersion.pkg = "test_pkg";
+      req = new V2Request.Builder("/cluster/package")
+          .forceV2(true)
+          .withMethod(SolrRequest.METHOD.POST)
+          .withPayload(Collections.singletonMap("delete", delVersion))
+          .build();
+
+      //we are expecting an error
+      expectError(req, cluster.getSolrClient(), errPath, "No such version:");
+
+      delVersion.version = "0.12";//correct version. Should succeed
+      req.process(cluster.getSolrClient());
+      //Verify with ZK that the data is correct
+      TestDistribPackageStore.assertResponseValues(1,
+          () -> new MapWriterMap((Map) Utils.fromJSON(cluster.getZkClient().getData(SOLR_PKGS_PATH,
+              null, new Stat(), true))),
+          Utils.makeMap(
+              ":packages:test_pkg[0]:version", "0.13",
+              ":packages:test_pkg[0]:files[0]", FILE2
+          ));
+
+
+      //So far we have verified the details with ZK directly;
+      //now use the package read API to verify that each node has the correct data
+      for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
+        String path = jetty.getBaseUrl().toString().replace("/solr", "/api") + "/cluster/package?wt=javabin";
+        TestDistribPackageStore.assertResponseValues(10, new Callable<NavigableObject>() {
+          @Override
+          public NavigableObject call() throws Exception {
+            try (HttpSolrClient solrClient = (HttpSolrClient) jetty.newClient()) {
+              return (NavigableObject) Utils.executeGET(solrClient.getHttpClient(), path, Utils.JAVABINCONSUMER);
+            }
+          }
+        }, Utils.makeMap(
+            ":result:packages:test_pkg[0]:version", "0.13",
+            ":result:packages:test_pkg[0]:files[0]", FILE3
+        ));
+      }
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
+  static void postFileAndWait(MiniSolrCloudCluster cluster, String fname, String path, String sig) throws Exception {
+    ByteBuffer fileContent = getFileContent(fname);
+    String sha512 = DigestUtils.sha512Hex(fileContent.array());
+
+    TestDistribPackageStore.postFile(cluster.getSolrClient(),
+        fileContent,
+        path, sig); // sig may be null to exercise the unsigned case
+
+    TestDistribPackageStore.waitForAllNodesHaveFile(cluster, path, Utils.makeMap(
+        ":files:" + path + ":sha512",
+        sha512
+    ), false);
+  }
+
+  private void expectError(V2Request req, SolrClient client, String errPath, String expectErrorMsg) throws IOException, SolrServerException {
+    try {
+      req.process(client);
+      fail("should have failed with message : " + expectErrorMsg);
+    } catch (BaseHttpSolrClient.RemoteExecutionException e) {
+      String msg = e.getMetaData()._getStr(errPath, "");
+      assertTrue("should have failed with message: " + expectErrorMsg + ", actual message: " + msg,
+          msg.contains(expectErrorMsg)
+      );
+    }
+  }
+}
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java b/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java
index 98f0d9a..b8105ab 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java
@@ -34,6 +34,7 @@ import java.nio.file.Path;
 import java.util.List;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.RejectedExecutionException;
+import java.util.function.BiFunction;
 import java.util.function.Function;
 import java.util.function.Predicate;
 import java.util.regex.Pattern;
@@ -353,6 +354,10 @@ public class SolrZkClient implements Closeable {
   }
 
   public void atomicUpdate(String path, Function<byte[], byte[]> editor) throws KeeperException, InterruptedException {
+    atomicUpdate(path, (stat, bytes) -> editor.apply(bytes));
+  }
+
+  public void atomicUpdate(String path, BiFunction<Stat, byte[], byte[]> editor) throws KeeperException, InterruptedException {
     for (; ; ) {
       byte[] modified = null;
       byte[] zkData = null;
@@ -360,7 +365,7 @@ public class SolrZkClient implements Closeable {
       try {
         if (exists(path, true)) {
           zkData = getData(path, null, s, true);
-          modified = editor.apply(zkData);
+          modified = editor.apply(s, zkData);
           if (modified == null) {
             //no change , no need to persist
             return;
@@ -368,7 +373,7 @@ public class SolrZkClient implements Closeable {
           setData(path, modified, s.getVersion(), true);
           break;
         } else {
-          modified = editor.apply(null);
+          modified = editor.apply(s, null);
           if (modified == null) {
             //no change , no need to persist
             return;
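
A sketch of the new Stat-aware overload from a caller's perspective (the path
and payload are illustrative; returning null from the editor skips the write):

    // Sketch: record the znode version we observed while editing the node.
    zkClient.atomicUpdate("/demo/state.json", (stat, bytes) -> {
      Map<String, Object> m = bytes == null
          ? new HashMap<>()
          : (Map<String, Object>) Utils.fromJSON(bytes);
      m.put("observedZnodeVersion", stat.getVersion());
      return Utils.toJSON(m);
    });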
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
index dcf7d9e..5dea5b0 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
@@ -118,6 +118,7 @@ public class ZkStateReader implements SolrCloseable {
   public static final String SOLR_AUTOSCALING_TRIGGER_STATE_PATH = "/autoscaling/triggerState";
   public static final String SOLR_AUTOSCALING_NODE_ADDED_PATH = "/autoscaling/nodeAdded";
   public static final String SOLR_AUTOSCALING_NODE_LOST_PATH = "/autoscaling/nodeLost";
+  public static final String SOLR_PKGS_PATH = "/packages.json";
 
   public static final String DEFAULT_SHARD_PREFERENCES = "defaultShardPreferences";
   public static final String REPLICATION_FACTOR = "replicationFactor";


[lucene-solr] 07/10: add a 'refresh' command to force refresh loading of packages

Posted by is...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ishan pushed a commit to branch jira/solr-13662-2
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit d19d1f1e47ce94027f17ab48416a5d62d8938e84
Author: noble <no...@apache.org>
AuthorDate: Mon Oct 21 22:05:33 2019 +1100

    add a 'refresh' command to force refresh loading of packages
---
 .../src/java/org/apache/solr/pkg/PackageAPI.java   | 31 +++++++++++++++++++-
 .../java/org/apache/solr/pkg/PackageLoader.java    | 10 +++++++
 .../src/test/org/apache/solr/pkg/TestPackages.java | 34 ++++++++++++++++++++--
 3 files changed, 72 insertions(+), 3 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
index fa9f952..f9084b4 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
@@ -204,6 +204,29 @@ public class PackageAPI {
       permission = PACKAGE_EDIT_PERM)
   public class Edit {
 
+    @Command(name = "refresh")
+    public void refresh(SolrQueryRequest req, SolrQueryResponse rsp, PayloadObj<String> payload) {
+      String p = payload.get();
+      if (p == null) {
+        payload.addError("No package specified");
+        return;
+      }
+      PackageLoader.Package pkg = coreContainer.getPackageLoader().getPackage(p);
+      if (pkg == null) {
+        payload.addError("No such package: " + p);
+        return;
+      }
+
+      for (String s : coreContainer.getPackageStoreAPI().shuffledNodes()) {
+        Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(),
+            coreContainer.getZkController().zkStateReader.getBaseUrlForNodeName(s).replace("/solr", "/api") + "/cluster/package?wt=javabin&omitHeader=true&refreshPackage=" + p,
+            Utils.JAVABINCONSUMER);
+      }
+
+
+    }
+
+
     @Command(name = "add")
     public void add(SolrQueryRequest req, SolrQueryResponse rsp, PayloadObj<AddVersion> payload) {
       if (!checkEnabled(payload)) return;
@@ -305,6 +328,12 @@ public class PackageAPI {
   public class Read {
     @Command()
     public void get(SolrQueryRequest req, SolrQueryResponse rsp) {
+      String refresh = req.getParams().get("refreshPackage");
+      if (refresh != null) {
+        packageLoader.notifyListeners(refresh);
+        return;
+      }
+
       int expectedVersion = req.getParams().getInt("expectedVersion", -1);
       if (expectedVersion != -1) {
         syncToVersion(expectedVersion);
@@ -322,7 +351,7 @@ public class PackageAPI {
       for (int i = 0; i < 10; i++) {
         log.debug("my version is {} , and expected version {}", pkgs.znodeVersion, expectedVersion);
         if (pkgs.znodeVersion >= expectedVersion) {
-          if(origVersion < pkgs.znodeVersion){
+          if (origVersion < pkgs.znodeVersion) {
             packageLoader.refreshPackageConf();
           }
           return;
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java b/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
index fb62a5f..c86e433 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageLoader.java
@@ -128,6 +128,16 @@ public class PackageLoader {
 
   }
 
+  public void notifyListeners(String pkg) {
+    Package p = packageClassLoaders.get(pkg);
+    if(p != null){
+      List<Package> l = Collections.singletonList(p);
+      for (SolrCore core : coreContainer.getCores()) {
+        core.getPackageListeners().packagesUpdated(l);
+      }
+    }
+  }
+
   /**
    * represents a package definition in the packages.json
    */
diff --git a/solr/core/src/test/org/apache/solr/pkg/TestPackages.java b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
index 92417d4..ddbf39a 100644
--- a/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
+++ b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java
@@ -245,13 +245,15 @@ public class TestPackages extends SolrCloudTestCase {
           return new RequestWriter.StringPayloadContentWriter("{set:{PKG_VERSIONS:{mypkg : '1.1'}}}",
               ClientUtils.TEXT_JSON);
         }
-      }
-          .process(cluster.getSolrClient()) ;
+      }.process(cluster.getSolrClient()) ;
 
       add.version = "2.1";
       add.files = Arrays.asList(new String[]{FILE3});
       req.process(cluster.getSolrClient());
 
+      //the collection has pinned mypkg to version 1.1,
+      //so no upgrade should happen
+
       verifyCmponent(cluster.getSolrClient(),
           COLLECTION_NAME, "queryResponseWriter", "json1",
           "mypkg", "1.1" );
@@ -264,7 +266,35 @@ public class TestPackages extends SolrCloudTestCase {
           COLLECTION_NAME, "requestHandler", "/runtime",
           "mypkg", "1.1" );
 
+      new GenericSolrRequest(SolrRequest.METHOD.POST, "/config/params", params ){
+        @Override
+        public RequestWriter.ContentWriter getContentWriter(String expectedType) {
+          return new RequestWriter.StringPayloadContentWriter("{set:{PKG_VERSIONS:{mypkg : '2.1'}}}",
+              ClientUtils.TEXT_JSON);
+        }
+      }.process(cluster.getSolrClient()) ;
+
+      //now, let's force every collection using 'mypkg' to refresh
+      //so that it uses version 2.1
+      new V2Request.Builder("/cluster/package")
+          .withMethod(SolrRequest.METHOD.POST)
+          .withPayload("{refresh : mypkg}")
+          .forceV2(true)
+          .build()
+          .process(cluster.getSolrClient());
+
 
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "queryResponseWriter", "json1",
+          "mypkg", "2.1" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "searchComponent", "get",
+          "mypkg", "2.1" );
+
+      verifyCmponent(cluster.getSolrClient(),
+          COLLECTION_NAME, "requestHandler", "/runtime",
+          "mypkg", "2.1" );
 
     } finally {
       cluster.shutdown();
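
A sketch of invoking the new command from SolrJ (mirroring the test above;
"client" is assumed to be an existing SolrClient):

    // Force all collections using "mypkg" to reload it at the newest version.
    new V2Request.Builder("/cluster/package")
        .forceV2(true)
        .withMethod(SolrRequest.METHOD.POST)
        .withPayload("{refresh : mypkg}")
        .build()
        .process(client);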


[lucene-solr] 10/10: Fix merge conflicts

Posted by is...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ishan pushed a commit to branch jira/solr-13662-2
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 231b01a7af8c292fba6742cb4d79fce7b82536ff
Author: Ishan Chattopadhyaya <is...@apache.org>
AuthorDate: Tue Oct 29 04:28:14 2019 +0530

    Fix merge conflicts
---
 .../java/org/apache/solr/core/CoreContainer.java   |  4 ---
 .../solr/packagemanager/SolrUpdateManager.java     |  4 +--
 .../src/java/org/apache/solr/pkg/PackageAPI.java   | 30 ++--------------------
 .../solr/client/solrj/request/beans/Package.java   |  4 +++
 4 files changed, 8 insertions(+), 34 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index 1d562c3..4999908 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -746,10 +746,6 @@ public class CoreContainer {
       containerHandlers.getApiBag().register(new AnnotatedApi(packageLoader.getPackageAPI().editAPI), Collections.EMPTY_MAP);
       containerHandlers.getApiBag().register(new AnnotatedApi(packageLoader.getPackageAPI().readAPI), Collections.EMPTY_MAP);
     }
-    packageLoader = new PackageLoader(this);
-    containerHandlers.getApiBag().register(new AnnotatedApi(packageLoader.getPackageAPI().editAPI), Collections.EMPTY_MAP);
-    containerHandlers.getApiBag().register(new AnnotatedApi(packageLoader.getPackageAPI().readAPI), Collections.EMPTY_MAP);
-
 
 
     // setup executor to load cores in parallel
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/SolrUpdateManager.java b/solr/core/src/java/org/apache/solr/packagemanager/SolrUpdateManager.java
index c4abf5c..78a448a 100644
--- a/solr/core/src/java/org/apache/solr/packagemanager/SolrUpdateManager.java
+++ b/solr/core/src/java/org/apache/solr/packagemanager/SolrUpdateManager.java
@@ -32,6 +32,7 @@ import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.V2Request;
+import org.apache.solr.client.solrj.request.beans.Package;
 import org.apache.solr.client.solrj.response.V2Response;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
@@ -42,7 +43,6 @@ import org.apache.solr.packagemanager.pf4j.FileDownloader;
 import org.apache.solr.packagemanager.pf4j.FileVerifier;
 import org.apache.solr.packagemanager.pf4j.PackageManagerException;
 import org.apache.solr.packagemanager.pf4j.SimpleFileDownloader;
-import org.apache.solr.pkg.PackageAPI;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -272,7 +272,7 @@ public class SolrUpdateManager {
       e.printStackTrace();
     }*/
     
-    PackageAPI.AddVersion add = new PackageAPI.AddVersion();
+    Package.AddVersion add = new Package.AddVersion();
     add.version = version;
     add.pkg = id;
     add.files = Arrays.asList(files);
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
index d38a486..6720bee 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
@@ -176,19 +176,14 @@ public class PackageAPI {
     @JsonProperty
     public String manifestSHA512;
 
-    public PkgVersion(AddVersion addVersion) {
-      this.version = addVersion.version;
-      this.files = addVersion.files;
-      this.manifest = addVersion.manifest;
-      this.manifestSHA512 = addVersion.manifestSHA512; // nocommit: compute and populate here
-    }
-
     public PkgVersion() {
     }
 
     public PkgVersion(Package.AddVersion addVersion) {
       this.version = addVersion.version;
       this.files = addVersion.files;
+      this.manifest = addVersion.manifest;
+      this.manifestSHA512 = addVersion.manifestSHA512;
     }
 
 
@@ -380,27 +375,6 @@ public class PackageAPI {
 
   }
 
-  public static class AddVersion implements ReflectMapWriter {
-    @JsonProperty(value = "package", required = true)
-    public String pkg;
-    @JsonProperty(required = true)
-    public String version;
-    @JsonProperty(required = true)
-    public List<String> files;
-    @JsonProperty(required = false)
-    public String manifestSHA512;
-    @JsonProperty(required = false)
-    public String manifest;
-  }
-
-  public static class DelVersion implements ReflectMapWriter {
-    @JsonProperty(value = "package", required = true)
-    public String pkg;
-    @JsonProperty(required = true)
-    public String version;
-
-  }
-
   void notifyAllNodesToSync(int expected) {
     for (String s : coreContainer.getPackageStoreAPI().shuffledNodes()) {
       Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(),
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/beans/Package.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/beans/Package.java
index 62bf6ec..0e34e75 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/beans/Package.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/beans/Package.java
@@ -33,6 +33,10 @@ public class Package {
     public String version;
     @JsonProperty(required = true)
     public List<String> files;
+    @JsonProperty
+    public String manifest;
+    @JsonProperty
+    public String manifestSHA512;
 
   }
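
With the bean now in solrj, callers populate it the same way as before, with
the manifest fields optional; a sketch (all values are placeholders, and the
manifest path is an assumed example):

    Package.AddVersion add = new Package.AddVersion();
    add.pkg = "mypkg";
    add.version = "1.0";
    add.files = Arrays.asList("/mypkg/lib.jar");
    add.manifest = "/mypkg/manifest.json";       // optional
    add.manifestSHA512 = "<sha512-of-manifest>"; // optional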