You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by ma...@apache.org on 2020/10/01 07:36:24 UTC

[lucene-solr] 04/06: @878 Enable some more tests.

This is an automated email from the ASF dual-hosted git repository.

markrmiller pushed a commit to branch reference_impl_dev
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 8521c7e4aade306f91a2f59494b24bd50d48d541
Author: markrmiller@gmail.com <ma...@gmail.com>
AuthorDate: Thu Oct 1 00:10:49 2020 -0500

    @878 Enable some more tests.
---
 settings.gradle                                    |   3 +
 .../java/org/apache/solr/cloud/ZkController.java   |   6 +-
 .../OverseerCollectionMessageHandler.java          | 155 ++++++++--------
 .../solr/cloud/api/collections/SplitShardCmd.java  |   2 +-
 .../java/org/apache/solr/core/CoreContainer.java   |   4 +-
 .../src/java/org/apache/solr/core/SolrCore.java    |  18 +-
 .../org/apache/solr/core/SolrResourceLoader.java   |   6 +-
 .../src/java/org/apache/solr/core/ZkContainer.java |   2 +-
 .../solr/handler/admin/MetricsHistoryHandler.java  |   2 +-
 .../apache/solr/handler/admin/PrepRecoveryOp.java  |  16 +-
 .../solr/handler/component/HttpShardHandler.java   |   5 +-
 .../solr/response/QueryResponseWriterUtil.java     |   4 +-
 .../apache/solr/response/XSLTResponseWriter.java   |   2 +-
 .../apache/solr/servlet/SolrDispatchFilter.java    |  14 +-
 .../apache/solr/servlet/SolrShutdownHandler.java   |  80 +++++----
 .../src/test/org/apache/solr/CursorPagingTest.java |   4 +-
 .../test/org/apache/solr/TestRandomFaceting.java   |  14 +-
 .../client/solrj/impl/ConnectionReuseTest.java     | 199 ---------------------
 .../org/apache/solr/cloud/DeleteReplicaTest.java   |  10 +-
 .../solr/cloud/MetricsHistoryIntegrationTest.java  |   5 +-
 .../CollectionsAPIAsyncDistributedZkTest.java      |  11 +-
 .../org/apache/solr/core/TestDynamicLoading.java   |   2 +-
 .../test/org/apache/solr/core/TestLazyCores.java   |   2 +-
 .../org/apache/solr/handler/V2StandaloneTest.java  |   7 +-
 .../solr/handler/admin/SplitHandlerTest.java       |   1 +
 .../apache/solr/request/TestRemoteStreaming.java   |  11 +-
 .../solr/client/solrj/impl/Http2SolrClient.java    |  10 +-
 .../solr/client/solrj/request/V2Request.java       |   2 +-
 .../src/java/org/apache/solr/SolrTestCase.java     |   2 +-
 .../src/java/org/apache/solr/util/TestHarness.java |   4 +-
 30 files changed, 200 insertions(+), 403 deletions(-)

diff --git a/settings.gradle b/settings.gradle
index e9566d5..172718b 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -74,3 +74,6 @@ include "solr:example"
 include "solr:packaging"
 include "solr:docker"
 include "solr:docker:package"
+
+include "solr:benchmark"
+
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index 1822f48..ac0fbc2 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -86,8 +86,6 @@ import org.apache.zookeeper.KeeperException.SessionExpiredException;
 import org.apache.zookeeper.WatchedEvent;
 import org.apache.zookeeper.Watcher;
 import org.apache.zookeeper.data.Stat;
-import org.eclipse.jetty.server.ShutdownMonitor;
-import org.eclipse.jetty.util.component.LifeCycle;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -683,6 +681,8 @@ public class ZkController implements Closeable, Runnable {
       IOUtils.closeQuietly(zkClient);
     }
 
+    SolrShutdownHandler.removeShutdown(this);
+
     assert ObjectReleaseTracker.release(this);
   }
 
@@ -1554,7 +1554,7 @@ public class ZkController implements Closeable, Runnable {
       // the watcher is added to a set so multiple calls of this method will left only one watcher
 
       // nocommit
-      //registerUnloadWatcher(cloudDesc.getCollectionName(), cloudDesc.getShardId(), cloudDesc.getCoreNodeName(), desc.getName());
+      registerUnloadWatcher(cloudDesc.getCollectionName(), cloudDesc.getShardId(), cloudDesc.getCoreNodeName(), desc.getName());
 
       // check replica's existence in clusterstate first
       try {
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java
index 4dda1d2..3a14536 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java
@@ -784,7 +784,8 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler,
     success.add(key, value);
   }
 
-  private static NamedList<Object> waitForCoreAdminAsyncCallToComplete(String nodeName, String requestId, String adminPath, ZkStateReader zkStateReader, HttpShardHandlerFactory shardHandlerFactory, Overseer overseer) throws KeeperException, InterruptedException {
+  private static NamedList<Object> waitForCoreAdminAsyncCallToComplete(String nodeName, String requestId, String adminPath, ZkStateReader zkStateReader, HttpShardHandlerFactory shardHandlerFactory,
+      Overseer overseer) throws KeeperException, InterruptedException {
     ShardHandler shardHandler = shardHandlerFactory.getShardHandler();
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set(CoreAdminParams.ACTION, CoreAdminAction.REQUESTSTATUS.toString());
@@ -792,94 +793,96 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler,
     int counter = 0;
     ShardRequest sreq;
 
-      sreq = new ShardRequest();
-      params.set("qt", adminPath);
-      sreq.purpose = 1;
-      String replica = zkStateReader.getBaseUrlForNodeName(nodeName);
-      sreq.shards = new String[]{replica};
-      sreq.actualShards = sreq.shards;
-      sreq.params = params;
-      CountDownLatch latch = new CountDownLatch(1);
-
-      // mn- from DistributedMap
-      final String asyncPathToWaitOn = Overseer.OVERSEER_ASYNC_IDS + "/mn-" + requestId;
-
-      Watcher waitForAsyncId = new Watcher() {
-        @Override
-        public void process(WatchedEvent event) {
-          if (Watcher.Event.EventType.None.equals(event.getType())) {
-            return;
-          }
-          if (event.getType().equals(Watcher.Event.EventType.NodeCreated)) {
-            latch.countDown();
-          } else if (event.getType().equals(Event.EventType.NodeDeleted)) {
-            // no-op: gets deleted below once we're done with it
-            return;
-          }
+    sreq = new ShardRequest();
+    params.set("qt", adminPath);
+    sreq.purpose = 1;
+    String replica = zkStateReader.getBaseUrlForNodeName(nodeName);
+    sreq.shards = new String[] {replica};
+    sreq.actualShards = sreq.shards;
+    sreq.params = params;
+    CountDownLatch latch = new CountDownLatch(1);
 
-          Stat rstats2 = null;
-          try {
-            rstats2 = zkStateReader.getZkClient().exists(asyncPathToWaitOn, this);
-          } catch (KeeperException e) {
-            log.error("ZooKeeper exception", e);
-            return;
-          } catch (InterruptedException e) {
-            log.info("interrupted");
-            return;
-          }
-          if (rstats2 != null) {
-            latch.countDown();
-          }
+    // mn- from DistributedMap
+    final String asyncPathToWaitOn = Overseer.OVERSEER_ASYNC_IDS + "/mn-" + requestId;
 
+    Watcher waitForAsyncId = new Watcher() {
+      @Override
+      public void process(WatchedEvent event) {
+        if (Watcher.Event.EventType.None.equals(event.getType())) {
+          return;
+        }
+        if (event.getType().equals(Watcher.Event.EventType.NodeCreated)) {
+          latch.countDown();
+        } else if (event.getType().equals(Event.EventType.NodeDeleted)) {
+          latch.countDown();
+          return;
         }
-      };
 
-      Stat rstats = zkStateReader.getZkClient().exists(asyncPathToWaitOn, waitForAsyncId);
+        Stat rstats2 = null;
+        try {
+          rstats2 = zkStateReader.getZkClient().exists(asyncPathToWaitOn, this);
+        } catch (KeeperException e) {
+          log.error("ZooKeeper exception", e);
+          return;
+        } catch (InterruptedException e) {
+          log.info("interrupted");
+          return;
+        }
+        if (rstats2 != null) {
+          latch.countDown();
+        }
 
-      if (rstats != null) {
-        latch.countDown();
       }
+    };
 
-      latch.await(15, TimeUnit.SECONDS); // nocommit - still need a central timeout strat
+    Stat rstats = zkStateReader.getZkClient().exists(asyncPathToWaitOn, waitForAsyncId);
 
-      shardHandler.submit(sreq, replica, sreq.params);
+    if (rstats != null) {
+      latch.countDown();
+    }
 
-      ShardResponse srsp;
+    boolean success = latch.await(15, TimeUnit.SECONDS); // nocommit - still need a central timeout strat
+    if (!success) {
+      throw new SolrException(ErrorCode.SERVER_ERROR, "Timeout waiting to see async zk node " + asyncPathToWaitOn);
+    }
 
-      srsp = shardHandler.takeCompletedOrError();
-      if (srsp != null) {
-        NamedList<Object> results = new NamedList<>();
-        processResponse(results, srsp, Collections.emptySet());
-        if (srsp.getSolrResponse().getResponse() == null) {
-          NamedList<Object> response = new NamedList<>();
-          response.add("STATUS", "failed");
-          return response;
-        }
+    shardHandler.submit(sreq, replica, sreq.params);
 
-        String r = (String) srsp.getSolrResponse().getResponse().get("STATUS");
-        if (r.equals("running")) {
-          if (log.isDebugEnabled())  log.debug("The task is still RUNNING, continuing to wait.");
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Task is still running even after reporting complete requestId: " + requestId + "" + srsp.getSolrResponse().getResponse().get("STATUS") +
-                  "retried " + counter + "times");
-        } else if (r.equals("completed")) {
-          // we're done with this entry in the DistributeMap
-          overseer.getCoreContainer().getZkController().clearAsyncId(requestId);
-          if (log.isDebugEnabled()) log.debug("The task is COMPLETED, returning");
-          return srsp.getSolrResponse().getResponse();
-        } else if (r.equals("failed")) {
-          // TODO: Improve this. Get more information.
-          if (log.isDebugEnabled()) log.debug("The task is FAILED, returning");
-
-        } else if (r.equals("notfound")) {
-          if (log.isDebugEnabled()) log.debug("The task is notfound, retry");
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Invalid status request for requestId: " + requestId + "" + srsp.getSolrResponse().getResponse().get("STATUS") +
-                  "retried " + counter + "times");
-        } else {
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Invalid status request " + srsp.getSolrResponse().getResponse().get("STATUS"));
-        }
+    ShardResponse srsp;
+
+    srsp = shardHandler.takeCompletedOrError();
+    if (srsp != null) {
+      NamedList<Object> results = new NamedList<>();
+      processResponse(results, srsp, Collections.emptySet());
+      if (srsp.getSolrResponse().getResponse() == null) {
+        NamedList<Object> response = new NamedList<>();
+        response.add("STATUS", "failed");
+        return response;
       }
 
-    throw new SolrException(ErrorCode.SERVER_ERROR, "No response on request for async status");
+      String r = (String) srsp.getSolrResponse().getResponse().get("STATUS");
+      if (r.equals("running")) {
+        if (log.isDebugEnabled()) log.debug("The task is still RUNNING, continuing to wait.");
+        throw new SolrException(ErrorCode.BAD_REQUEST,
+            "Task is still running even after reporting complete requestId: " + requestId + "" + srsp.getSolrResponse().getResponse().get("STATUS") + "retried " + counter + "times");
+      } else if (r.equals("completed")) {
+        // we're done with this entry in the DistributeMap
+        overseer.getCoreContainer().getZkController().clearAsyncId(requestId);
+        if (log.isDebugEnabled()) log.debug("The task is COMPLETED, returning");
+        return srsp.getSolrResponse().getResponse();
+      } else if (r.equals("failed")) {
+        // TODO: Improve this. Get more information.
+        if (log.isDebugEnabled()) log.debug("The task is FAILED, returning");
+
+      } else if (r.equals("notfound")) {
+        if (log.isDebugEnabled()) log.debug("The task is notfound, retry");
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Invalid status request for requestId: " + requestId + "" + srsp.getSolrResponse().getResponse().get("STATUS") + "retried " + counter + "times");
+      } else {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Invalid status request " + srsp.getSolrResponse().getResponse().get("STATUS"));
+      }
+    }
+
+    throw new SolrException(ErrorCode.SERVER_ERROR, "No response on request for async status url="+ replica + " params=" + sreq.params);
   }
 
   @Override
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
index a3b28c3..c0a279e 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
@@ -298,7 +298,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
         String subShardName = subShardNames.get(i);
         DocRouter.Range subRange = subRanges.get(i);
 
-        log.debug("Creating slice {} of collection {} on {}", subSlice, collectionName, nodeName);
+        log.info("Creating slice {} of collection {} on {}", subSlice, collectionName, nodeName);
 
         Map<String, Object> propMap = new HashMap<>();
         propMap.put(Overseer.QUEUE_OPERATION, CREATESHARD.toLower());
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index c67a629..ffe4d73 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -57,7 +57,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.store.Directory;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
-import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
 import org.apache.solr.client.solrj.impl.CloudHttp2SolrClient;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.client.solrj.impl.SolrHttpClientBuilder;
@@ -66,7 +65,6 @@ import org.apache.solr.client.solrj.impl.XMLResponseParser;
 import org.apache.solr.client.solrj.io.SolrClientCache;
 import org.apache.solr.client.solrj.util.SolrIdentifierValidator;
 import org.apache.solr.cloud.CloudDescriptor;
-import org.apache.solr.cloud.OverseerTaskQueue;
 import org.apache.solr.cloud.ZkController;
 import org.apache.solr.cloud.autoscaling.AutoScalingHandler;
 import org.apache.solr.common.AlreadyClosedException;
@@ -1128,6 +1126,8 @@ public class CoreContainer implements Closeable {
       closer.collect(authenPlugin);
       closer.collect(auditPlugin);
       closer.collect(callables);
+      closer.collect(metricsHistoryHandler);
+
 
       closer.addCollect();
 
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 39b3486..8e2d4c9 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -1166,24 +1166,8 @@ public final class SolrCore implements SolrInfoBean, Closeable {
 
       final DocCollection collection = clusterState.getCollectionOrNull(coreDescriptor.getCloudDescriptor().getCollectionName());
       if (collection != null) {
-
-        if (coreContainer.getZkController().getZkClient().isConnected()) {
-          // make sure we see our shard first - these tries to cover a surprising race where we don't find our shard in the clusterstate
-          // in the below bufferUpdatesIfConstructing call
-
-          try {
-            coreContainer.getZkController().getZkStateReader().waitForState(coreDescriptor.getCollectionName(),
-                10, TimeUnit.SECONDS, (l,c) -> c != null && c.getSlice(coreDescriptor.getCloudDescriptor().getShardId()) != null);
-          } catch (InterruptedException e) {
-            ParWork.propagateInterrupt(e);
-            throw new SolrException(ErrorCode.SERVER_ERROR, e);
-          } catch (TimeoutException e) {
-            throw new SolrException(ErrorCode.SERVER_ERROR, e);
-          }
-        }
-
         final Slice slice = collection.getSlice(coreDescriptor.getCloudDescriptor().getShardId());
-        if (slice.getState() == Slice.State.CONSTRUCTION) {
+        if (slice != null && slice.getState() == Slice.State.CONSTRUCTION) {
           // set update log to buffer before publishing the core
           assert getUpdateHandler().getUpdateLog() != null;
           getUpdateHandler().getUpdateLog().bufferUpdates();
diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
index 6e23b02..3968911 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
@@ -100,9 +100,9 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
   protected volatile URLClassLoader resourceClassLoader;
   private final Path instanceDir;
 
-  private final Set<SolrCoreAware> waitingForCore = ConcurrentHashMap.newKeySet(5000);
-  private final Set<SolrInfoBean> infoMBeans = ConcurrentHashMap.newKeySet(5000);
-  private final Set<ResourceLoaderAware> waitingForResources = ConcurrentHashMap.newKeySet(5000);
+  private final Set<SolrCoreAware> waitingForCore = ConcurrentHashMap.newKeySet(256);
+  private final Set<SolrInfoBean> infoMBeans = ConcurrentHashMap.newKeySet(256);
+  private final Set<ResourceLoaderAware> waitingForResources = ConcurrentHashMap.newKeySet(256);
 
   // Provide a registry so that managed resources can register themselves while the XML configuration
   // documents are being parsed ... after all are registered, they are asked by the RestManager to
diff --git a/solr/core/src/java/org/apache/solr/core/ZkContainer.java b/solr/core/src/java/org/apache/solr/core/ZkContainer.java
index 02e32ff..71e704a 100644
--- a/solr/core/src/java/org/apache/solr/core/ZkContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/ZkContainer.java
@@ -218,7 +218,7 @@ public class ZkContainer implements Closeable {
             ParWork.propagateInterrupt(e);
             SolrException exp = new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
             try {
-              if (zkController.isConnected()) {
+              if (zkController.isConnected() && !zkController.getCoreContainer().isShutDown()) {
                 zkController.publish(cd, Replica.State.DOWN);
               }
             } catch (Exception e1) {
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java
index 85063b8..c4c3a6d 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java
@@ -628,7 +628,7 @@ public class MetricsHistoryHandler extends RequestHandlerBase implements Permiss
 
     try (ParWork closer = new ParWork(this)) {
       closer.collect(knownDbs.values());
-      closer.collect();
+      closer.collect(solrClient);
       closer.collect(factory);
       closer.collect(collectService);
     }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java b/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java
index 12a2f6b..b95fd01 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java
@@ -80,11 +80,19 @@ class PrepRecoveryOp implements CoreAdminHandler.CoreAdminOp {
           coreContainer.waitForLoadingCore(cname, 30000);
           try (SolrCore core2 = coreContainer.getCore(cname)) {
             if (core2 == null) {
-              throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "core not found:" + cname);
+              Thread.sleep(2000); // nocommit - wait better
+              try (SolrCore core3 = coreContainer.getCore(cname)) {
+                if (core3 == null) {
+                  throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "core not found:" + cname);
+                }
+                collectionName = core2.getCoreDescriptor().getCloudDescriptor().getCollectionName();
+                cloudDescriptor = core2.getCoreDescriptor()
+                    .getCloudDescriptor();
+              }
+            } else {
+              collectionName = core2.getCoreDescriptor().getCloudDescriptor().getCollectionName();
+              cloudDescriptor = core2.getCoreDescriptor().getCloudDescriptor();
             }
-            collectionName = core2.getCoreDescriptor().getCloudDescriptor().getCollectionName();
-            cloudDescriptor = core2.getCoreDescriptor()
-                .getCloudDescriptor();
           }
         }
       } else {
diff --git a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
index b302bfe..e3d8365 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
@@ -51,6 +51,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -74,7 +75,7 @@ public class HttpShardHandler extends ShardHandler {
   private Map<ShardResponse,Cancellable> responseCancellableMap;
   private BlockingQueue<ShardResponse> responses;
   private AtomicInteger pending;
-  private Map<String, List<String>> shardToURLs;
+  private final Map<String, List<String>> shardToURLs;
   private LBHttp2SolrClient lbClient;
 
   public HttpShardHandler(HttpShardHandlerFactory httpShardHandlerFactory) {
@@ -82,7 +83,7 @@ public class HttpShardHandler extends ShardHandler {
     this.lbClient = httpShardHandlerFactory.loadbalancer;
     this.pending = new AtomicInteger(0);
     this.responses = new LinkedBlockingQueue<>();
-    this.responseCancellableMap = new HashMap<>();
+    this.responseCancellableMap = new ConcurrentHashMap<>();
 
     // maps "localhost:8983|localhost:7574" to a shuffled List("http://localhost:8983","http://localhost:7574")
     // This is primarily to keep track of what order we should use to query the replicas of a shard
diff --git a/solr/core/src/java/org/apache/solr/response/QueryResponseWriterUtil.java b/solr/core/src/java/org/apache/solr/response/QueryResponseWriterUtil.java
index 4146384..7be62b7 100644
--- a/solr/core/src/java/org/apache/solr/response/QueryResponseWriterUtil.java
+++ b/solr/core/src/java/org/apache/solr/response/QueryResponseWriterUtil.java
@@ -61,13 +61,13 @@ public final class QueryResponseWriterUtil {
           // See SOLR-8669.
         }
       };
-      Writer writer = buildWriter(out, ContentStreamBase.getCharsetFromContentType(contentType));
+      FastWriter writer = buildWriter(out, ContentStreamBase.getCharsetFromContentType(contentType));
       responseWriter.write(writer, solrRequest, solrResponse);
       writer.flush();
     }
   }
   
-  private static Writer buildWriter(OutputStream outputStream, String charset) throws UnsupportedEncodingException {
+  private static FastWriter buildWriter(OutputStream outputStream, String charset) throws UnsupportedEncodingException {
     Writer writer = (charset == null) ? new OutputStreamWriter(outputStream, StandardCharsets.UTF_8)
         : new OutputStreamWriter(outputStream, charset);
     
diff --git a/solr/core/src/java/org/apache/solr/response/XSLTResponseWriter.java b/solr/core/src/java/org/apache/solr/response/XSLTResponseWriter.java
index 98284eb..a0bc8f8 100644
--- a/solr/core/src/java/org/apache/solr/response/XSLTResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/XSLTResponseWriter.java
@@ -97,7 +97,7 @@ public class XSLTResponseWriter implements QueryResponseWriter {
     final Transformer t = getTransformer(request);
     
     // capture the output of the XMLWriter
-    final CharArrayWriter w = new CharArrayWriter(64);
+    final CharArrayWriter w = new CharArrayWriter(256);
     XMLWriter.writeResponse(w,request,response);
     
     // and write transformed result to our writer
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index b01a97d..ccb0471 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -80,6 +80,7 @@ import org.apache.solr.core.SolrXmlConfig;
 import org.apache.solr.core.XmlConfigFile;
 import org.apache.solr.metrics.AltBufferPoolMetricSet;
 import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.OperatingSystemMetricSet;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
 import org.apache.solr.rest.schema.FieldTypeXmlAdapter;
@@ -235,13 +236,12 @@ public class SolrDispatchFilter extends BaseSolrFilter {
     registryName = SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm);
     final Set<String> hiddenSysProps = coresInit.getConfig().getMetricsConfig().getHiddenSysProps();
     try {
-      metricManager.registerAll(registryName, new AltBufferPoolMetricSet(), false, "buffers");
-      metricManager.registerAll(registryName, new ClassLoadingGaugeSet(), false, "classes");
-      // nocommit - yuck
-      //metricManager.registerAll(registryName, new OperatingSystemMetricSet(), SolrMetricManager.ResolutionStrategy.IGNORE, "os");
-      metricManager.registerAll(registryName, new GarbageCollectorMetricSet(), false, "gc");
-      metricManager.registerAll(registryName, new MemoryUsageGaugeSet(), false, "memory");
-      metricManager.registerAll(registryName, new ThreadStatesGaugeSet(), false, "threads"); // todo should we use CachedThreadStatesGaugeSet instead?
+      metricManager.registerAll(registryName, new AltBufferPoolMetricSet(), true, "buffers");
+      metricManager.registerAll(registryName, new ClassLoadingGaugeSet(), true, "classes");
+      metricManager.registerAll(registryName, new OperatingSystemMetricSet(), true, "os");
+      metricManager.registerAll(registryName, new GarbageCollectorMetricSet(), true, "gc");
+      metricManager.registerAll(registryName, new MemoryUsageGaugeSet(), true, "memory");
+      metricManager.registerAll(registryName, new ThreadStatesGaugeSet(), true, "threads"); // todo should we use CachedThreadStatesGaugeSet instead?
       MetricsMap sysprops = new MetricsMap((detailed, map) -> {
         System.getProperties().forEach((k, v) -> {
           if (!hiddenSysProps.contains(k)) {
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrShutdownHandler.java b/solr/core/src/java/org/apache/solr/servlet/SolrShutdownHandler.java
index cabde78..f8c912b 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrShutdownHandler.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrShutdownHandler.java
@@ -42,52 +42,54 @@ public class SolrShutdownHandler extends HandlerWrapper implements Graceful {
     public Future<Void> shutdown() {
         log.error("GRACEFUL SHUTDOWN CALLED");
 //        return new FutureCallback(true);
-        return new Future<Void>() {
-            @Override
-            public boolean cancel(boolean b) {
-                return false;
-            }
-
-            @Override
-            public boolean isCancelled() {
-                return false;
-            }
+        return new VoidShutdownFuture();
+    }
 
-            @Override
-            public synchronized boolean isDone() {
-                return false;
-            }
+    @Override
+    public boolean isShutdown() {
+        return true;
+    }
 
-            @Override
-            public synchronized Void get() throws InterruptedException, ExecutionException {
-                synchronized (SolrShutdownHandler.class) {
-                    try (ParWork work = new ParWork(this)) {
-                        for (Runnable run : shutdowns) {
-                            work.collect("shutdown", () -> run.run());
-                        }
+    private static class VoidShutdownFuture implements Future<Void> {
+        @Override
+        public boolean cancel(boolean b) {
+            return false;
+        }
+
+        @Override
+        public boolean isCancelled() {
+            return false;
+        }
+
+        @Override
+        public synchronized boolean isDone() {
+            return false;
+        }
+
+        @Override
+        public synchronized Void get() throws InterruptedException, ExecutionException {
+            synchronized (SolrShutdownHandler.class) {
+                try (ParWork work = new ParWork(this)) {
+                    for (Runnable run : shutdowns) {
+                        work.collect("shutdown", () -> run.run());
                     }
                 }
-                return null;
+                shutdowns.clear();
             }
-
-            @Override
-            public synchronized Void get(long l, TimeUnit timeUnit) throws InterruptedException, ExecutionException, TimeoutException {
-                synchronized (SolrShutdownHandler.class) {
-                    try (ParWork work = new ParWork(this)) {
-                        for (Runnable run : shutdowns) {
-                            work.collect("shutdown", () -> run.run());
-                        }
+            return null;
+        }
+
+        @Override
+        public synchronized Void get(long l, TimeUnit timeUnit) throws InterruptedException, ExecutionException, TimeoutException {
+            synchronized (SolrShutdownHandler.class) {
+                try (ParWork work = new ParWork(this)) {
+                    for (Runnable run : shutdowns) {
+                        work.collect("shutdown", () -> run.run());
                     }
-                    shutdowns.clear();
                 }
-
-                return null;
+                shutdowns.clear();
             }
-        };
-    }
-
-    @Override
-    public boolean isShutdown() {
-        return true;
+            return null;
+        }
     }
 }
diff --git a/solr/core/src/test/org/apache/solr/CursorPagingTest.java b/solr/core/src/test/org/apache/solr/CursorPagingTest.java
index 181b5da..54b5ecc 100644
--- a/solr/core/src/test/org/apache/solr/CursorPagingTest.java
+++ b/solr/core/src/test/org/apache/solr/CursorPagingTest.java
@@ -699,9 +699,9 @@ public class CursorPagingTest extends SolrTestCaseJ4 {
   /**
    * test faceting with deep paging
    */
-  @Ignore // nocommit debug - flakey test, everytime I start fixing things, I notice this can fail
+  @Nightly // slow
   public void testFacetingWithRandomSorts() throws Exception {
-    final int numDocs = TestUtil.nextInt(random(), TEST_NIGHTLY ? 1000 : 100, TEST_NIGHTLY ? 3000 : 500);
+    final int numDocs = TestUtil.nextInt(random(), 1000, 3000);
     String[] fieldsToFacetOn = { "int", "long", "str" };
     String[] facetMethods = { "enum", "fc", "fcs" };
 
diff --git a/solr/core/src/test/org/apache/solr/TestRandomFaceting.java b/solr/core/src/test/org/apache/solr/TestRandomFaceting.java
index dbc6788..2d6df31 100644
--- a/solr/core/src/test/org/apache/solr/TestRandomFaceting.java
+++ b/solr/core/src/test/org/apache/solr/TestRandomFaceting.java
@@ -31,6 +31,7 @@ import java.util.Set;
 import java.util.function.Consumer;
 import java.util.regex.Pattern;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.common.SolrException.ErrorCode;
@@ -39,12 +40,12 @@ import org.apache.solr.common.util.Utils;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.schema.SchemaField;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @Slow
+@LuceneTestCase.Nightly // slow
 public class TestRandomFaceting extends SolrTestCaseJ4 {
 
   private static final Pattern trieFields = Pattern.compile(".*_t.");
@@ -150,10 +151,9 @@ public class TestRandomFaceting extends SolrTestCaseJ4 {
   }
 
   @Test
-  @Ignore // nocommit debug
   public void testRandomFaceting() throws Exception {
     Random rand = random();
-    int iter = atLeast(TEST_NIGHTLY ? 100 : 15);
+    int iter = atLeast(100);
     init();
     addMoreDocs(0);
     
@@ -203,7 +203,7 @@ public class TestRandomFaceting extends SolrTestCaseJ4 {
         params.add("facet.offset", Integer.toString(offset));
       }
 
-      int limit = TEST_NIGHTLY ? 100 : 10;
+      int limit = 100;
       if (rand.nextInt(100) < 20) {
         if (rand.nextBoolean()) {
           limit = rand.nextInt(100) < 10 ? rand.nextInt(indexSize/2+1) : rand.nextInt(indexSize*2);
@@ -263,7 +263,7 @@ public class TestRandomFaceting extends SolrTestCaseJ4 {
           }
           
           // if (random().nextBoolean()) params.set("facet.mincount", "1");  // uncomment to test that validation fails
-          if (!(params.getInt("facet.limit", TEST_NIGHTLY ? 100 : 10) == 0 &&
+          if (!(params.getInt("facet.limit", 100) == 0 &&
               !params.getBool("facet.missing", false))) {
             // it bypasses all processing, and we can go to empty validation
             if (exists && params.getInt("facet.mincount", 0)>1) {
@@ -320,7 +320,7 @@ public class TestRandomFaceting extends SolrTestCaseJ4 {
     if (err != null) {
       log.error("ERROR: mismatch facet response: {}\n expected ={}\n response = {}\n request = {}"
           , err, expected, actual, params);
-      fail(err);
+      fail(err + " method=" + method);
     }
   }
 
@@ -367,7 +367,7 @@ public class TestRandomFaceting extends SolrTestCaseJ4 {
         stratified.addAll(stratas.get(s));
       }// cropping them now
       int offset=params.getInt("facet.offset", 0) * 2;
-      int end = offset + params.getInt("facet.limit", TEST_NIGHTLY ? 100 : 10) * 2 ;
+      int end = offset + params.getInt("facet.limit", 100) * 2 ;
       int fromIndex = offset > stratified.size() ?  stratified.size() : offset;
       stratified = stratified.subList(fromIndex, 
                end > stratified.size() ?  stratified.size() : end);
diff --git a/solr/core/src/test/org/apache/solr/client/solrj/impl/ConnectionReuseTest.java b/solr/core/src/test/org/apache/solr/client/solrj/impl/ConnectionReuseTest.java
deleted file mode 100644
index 31afcfc..0000000
--- a/solr/core/src/test/org/apache/solr/client/solrj/impl/ConnectionReuseTest.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.client.solrj.impl;
-
-import java.io.IOException;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.http.HttpClientConnection;
-import org.apache.http.HttpConnectionMetrics;
-import org.apache.http.HttpException;
-import org.apache.http.HttpHost;
-import org.apache.http.HttpRequest;
-import org.apache.http.HttpVersion;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.protocol.HttpClientContext;
-import org.apache.http.conn.ConnectionPoolTimeoutException;
-import org.apache.http.conn.ConnectionRequest;
-import org.apache.http.conn.routing.HttpRoute;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
-import org.apache.http.message.BasicHttpRequest;
-import org.apache.solr.SolrTestCase;
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.client.solrj.SolrClient;
-import org.apache.solr.client.solrj.request.CollectionAdminRequest;
-import org.apache.solr.cloud.SolrCloudTestCase;
-import org.apache.solr.common.ParWork;
-import org.apache.solr.update.AddUpdateCommand;
-import org.apache.solr.util.TestInjection;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
-
-@SolrTestCase.SuppressSSL
-@Ignore // nocommit look at this again later
-public class ConnectionReuseTest extends SolrCloudTestCase {
-  
-  private AtomicInteger id = new AtomicInteger();
-  private HttpClientContext context = HttpClientContext.create();
-
-  private static final String COLLECTION = "collection1";
-
-  @BeforeClass
-  public static void setupCluster() throws Exception {
-    if (TEST_NIGHTLY) TestInjection.failUpdateRequests = "true:100";
-    configureCluster(1).formatZk(true)
-        .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
-        .configure();
-
-    CollectionAdminRequest.createCollection(COLLECTION, "config", 1, 1)
-        .process(cluster.getSolrClient());
-  }
-
-  private SolrClient buildClient(CloseableHttpClient httpClient, String url) {
-    switch (random().nextInt(3)) {
-      case 0:
-        // currently only testing with 1 thread
-        return SolrTestCaseJ4.getConcurrentUpdateSolrClient(url.toString() + "/" + COLLECTION, httpClient, 6, 1);
-      case 1:
-        return SolrTestCaseJ4.getHttpSolrClient(url + "/" + COLLECTION);
-      case 2:
-        CloudSolrClient client = SolrTestCaseJ4.getCloudSolrClient(cluster.getZkServer().getZkAddress(), random().nextBoolean(), httpClient, 30000, 60000);
-        client.setDefaultCollection(COLLECTION);
-        return client;
-    }
-    throw new RuntimeException("impossible");
-  }
-  
-  @Test
-  public void testConnectionReuse() throws Exception {
-
-    String url = cluster.getJettySolrRunners().get(0).getBaseUrl();
-    String host = cluster.getJettySolrRunners().get(0).getHost();
-    int port = cluster.getJettySolrRunners().get(0).getLocalPort();
-    PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
-
-    CloseableHttpClient httpClient = HttpClientUtil.createClient(null, cm);
-    try (SolrClient client = buildClient(httpClient, url)) {
-      HttpHost target = new HttpHost(host, port, isSSLMode() ? "https" : "http");
-      HttpRoute route = new HttpRoute(target);
-
-      ConnectionRequest mConn = getClientConnectionRequest(httpClient, route, cm);
-
-      HttpClientConnection conn1 = getConn(mConn);
-      headerRequest(target, route, conn1, cm);
-
-      cm.releaseConnection(conn1, null, -1, TimeUnit.MILLISECONDS);
-
-      int queueBreaks = 0;
-      int cnt1 = atLeast(3);
-      int cnt2 = atLeast(30);
-      for (int j = 0; j < cnt1; j++) {
-        boolean done = false;
-        for (int i = 0; i < cnt2; i++) {
-          AddUpdateCommand c = new AddUpdateCommand(null);
-          c.solrDoc = SolrTestCaseJ4.sdoc("id", id.incrementAndGet());
-          try {
-            client.add(c.solrDoc);
-          } catch (Exception e) {
-            ParWork.propagateInterrupt(e);
-            e.printStackTrace();
-          }
-          if (!done && i > 0 && i < cnt2 - 1 && client instanceof ConcurrentUpdateSolrClient
-              && random().nextInt(10) > 8) {
-            queueBreaks++;
-            done = true;
-          }
-        }
-        if (client instanceof ConcurrentUpdateSolrClient) {
-          try {
-            ((ConcurrentUpdateSolrClient) client).blockUntilFinished();
-          } catch (Exception e) {
-            ParWork.propagateInterrupt(e);
-            e.printStackTrace();
-          }
-        }
-      }
-
-      route = new HttpRoute(new HttpHost(host, port, isSSLMode() ? "https" : "http"));
-
-      mConn = cm.requestConnection(route, HttpSolrClient.cacheKey);
-
-      HttpClientConnection conn2 = getConn(mConn);
-
-      HttpConnectionMetrics metrics = conn2.getMetrics();
-      headerRequest(target, route, conn2, cm);
-
-      cm.releaseConnection(conn2, null, -1, TimeUnit.MILLISECONDS);
-
-      assertNotNull("No connection metrics found - is the connection getting aborted? server closing the connection? "
-          + client.getClass().getSimpleName(), metrics);
-
-      // we try and make sure the connection we get has handled all of the requests in this test
-      if (client instanceof ConcurrentUpdateSolrClient) {
-        // we can't fully control queue polling breaking up requests - allow a bit of leeway
-        int exp = cnt1 + queueBreaks + 2;
-        assertTrue(
-            "We expected all communication via streaming client to use one connection! expected=" + exp + " got="
-                + metrics.getRequestCount(),
-            Math.max(exp, metrics.getRequestCount()) - Math.min(exp, metrics.getRequestCount()) < 3);
-      } else {
-        assertTrue("We expected all communication to use one connection! " + client.getClass().getSimpleName() + " "
-            + metrics.getRequestCount(),
-            cnt1 * cnt2 + 2 <= metrics.getRequestCount());
-      }
-
-    }
-    finally {
-      HttpClientUtil.close(httpClient);
-      cm.shutdown();
-    }
-  }
-
-  public HttpClientConnection getConn(ConnectionRequest mConn)
-      throws InterruptedException, ConnectionPoolTimeoutException, ExecutionException {
-    HttpClientConnection conn = mConn.get(30, TimeUnit.SECONDS);
-
-    return conn;
-  }
-
-  public void headerRequest(HttpHost target, HttpRoute route, HttpClientConnection conn, PoolingHttpClientConnectionManager cm)
-      throws IOException, HttpException {
-    HttpRequest req = new BasicHttpRequest("OPTIONS", "*", HttpVersion.HTTP_1_1);
-
-    req.addHeader("Host", target.getHostName());
-    if (!conn.isOpen()) {
-      // establish connection based on its route info
-      cm.connect(conn, route, 1000, context);
-      // and mark it as route complete
-      cm.routeComplete(conn, route, context);
-    }
-    conn.sendRequestHeader(req);
-    conn.flush();
-    conn.receiveResponseHeader();
-  }
-
-  public ConnectionRequest getClientConnectionRequest(HttpClient httpClient, HttpRoute route, PoolingHttpClientConnectionManager cm) {
-    ConnectionRequest mConn = cm.requestConnection(route, HttpSolrClient.cacheKey);
-    return mConn;
-  }
-
-}
-
diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
index 7781f3c..525063f 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
@@ -94,8 +94,6 @@ public class DeleteReplicaTest extends SolrCloudTestCase {
   }
 
   @Test
-  @Ignore // nocommit: investigate
-  // commented out on: 01-Apr-2019   @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018
   public void deleteLiveReplicaTest() throws Exception {
 
     final String collectionName = "delLiveColl";
@@ -130,13 +128,8 @@ public class DeleteReplicaTest extends SolrCloudTestCase {
     
     CollectionAdminRequest.deleteReplica(collectionName, shard.getName(), replica.getName())
         .process(cluster.getSolrClient());
-    waitForState("Expected replica " + replica.getName() + " to have been removed", collectionName, (n, c) -> {
-      Slice testShard = c.getSlice(shard.getName());
-      return testShard.getReplica(replica.getName()) == null;
-    });
     
-    // the core should no longer have a watch collection state since it was removed
-    // the core should no longer have a watch collection state since it was removed
+    // the core should no longer have a watch collection state since it was removed    // the core should no longer have a watch collection state since it was removed
     TimeOut timeOut = new TimeOut(15, TimeUnit.SECONDS, TimeSource.NANO_TIME);
     timeOut.waitFor("Waiting for core's watcher to be removed", () -> {
         final long postDeleteWatcherCount = countUnloadCoreOnDeletedWatchers
@@ -207,7 +200,6 @@ public class DeleteReplicaTest extends SolrCloudTestCase {
   }
 
   @Test
-  @Ignore // nocommit: investigate
   public void deleteReplicaFromClusterState() throws Exception {
     final String collectionName = "deleteFromClusterStateCollection";
     CollectionAdminRequest.createCollection(collectionName, "conf", 1, 3)
diff --git a/solr/core/src/test/org/apache/solr/cloud/MetricsHistoryIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/MetricsHistoryIntegrationTest.java
index dfaf126..5fba1c1 100644
--- a/solr/core/src/test/org/apache/solr/cloud/MetricsHistoryIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/MetricsHistoryIntegrationTest.java
@@ -51,7 +51,7 @@ import org.slf4j.LoggerFactory;
  */
 @LuceneTestCase.Slow
 @LogLevel("org.apache.solr.handler.admin=DEBUG")
-@Ignore // nocommit debug
+@Ignore // nocommit debug, I think it takes a bit of time for metrics to be populated
 public class MetricsHistoryIntegrationTest extends SolrCloudTestCase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
@@ -62,6 +62,9 @@ public class MetricsHistoryIntegrationTest extends SolrCloudTestCase {
   @BeforeClass
   public static void setupCluster() throws Exception {
     System.setProperty("solr.disableDefaultJmxReporter", "false");
+    System.setProperty("solr.disableMetricsHistoryHandler", "false");
+    System.setProperty("solr.suppressDefaultConfigBootstrap", "false");
+
     boolean simulated = TEST_NIGHTLY ? random().nextBoolean() : true;
     if (simulated) {
       cloudManager = SimCloudManager.createCluster(1, TimeSource.get("simTime:50"));
diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java
index a6079d0..17faf85 100644
--- a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java
@@ -26,6 +26,7 @@ import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.client.solrj.SolrClient;
@@ -76,7 +77,7 @@ public class CollectionsAPIAsyncDistributedZkTest extends SolrCloudTestCase {
   }
 
   @Test
-  @Ignore // nocommit
+  @Ignore // nocommit perhaps due to async on search side? An async call returns no response while splitting: No response on request for async status
   public void testSolrJAPICalls() throws Exception {
 
     final CloudHttp2SolrClient client = cluster.getSolrClient();
@@ -86,9 +87,8 @@ public class CollectionsAPIAsyncDistributedZkTest extends SolrCloudTestCase {
 
     cluster.waitForActiveCollection("testasynccollectioncreation", 1, 1);
 
-    // nocommit need to get abort for prep recovery back
-//    state = CollectionAdminRequest.createCollection("testasynccollectioncreation", "conf1", 1, 1).processAndWait(client, MAX_TIMEOUT_SECONDS);
-//    assertSame("Recreating a collection with the same should have failed.", RequestStatusState.FAILED, state);
+    state = CollectionAdminRequest.createCollection("testasynccollectioncreation", "conf1", 1, 1).processAndWait(client, MAX_TIMEOUT_SECONDS);
+    assertSame("Recreating a collection with the same should have failed.", RequestStatusState.FAILED, state);
 
     state = CollectionAdminRequest.addReplicaToShard("testasynccollectioncreation", "shard1").processAndWait(client, MAX_TIMEOUT_SECONDS);
     assertSame("Add replica did not complete", RequestStatusState.COMPLETED, state);
@@ -214,7 +214,6 @@ public class CollectionsAPIAsyncDistributedZkTest extends SolrCloudTestCase {
     assertSame("DeleteCollection did not complete", RequestStatusState.COMPLETED, state);
   }
 
-  @Ignore // nocommit debug
   public void testAsyncIdRaceCondition() throws Exception {
 
     SolrClient[] clients = new SolrClient[cluster.getJettySolrRunners().size()];
@@ -252,7 +251,7 @@ public class CollectionsAPIAsyncDistributedZkTest extends SolrCloudTestCase {
               if (log.isInfoEnabled()) {
                 log.info("{} - Reloading Collection.", Thread.currentThread().getName());
               }
-              reloadCollectionRequest.processAsync("repeatedId", clients[random().nextInt(clients.length)]);
+              reloadCollectionRequest.processAsync("repeatedId", clients[LuceneTestCase.random().nextInt(clients.length)]);
               numSuccess.incrementAndGet();
             } catch (SolrServerException e) {
               if (log.isInfoEnabled()) {
diff --git a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
index a8fa086..5f2fc7d 100644
--- a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
+++ b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
@@ -39,7 +39,7 @@ import org.junit.Test;
 import static java.util.Arrays.asList;
 import static org.apache.solr.handler.TestSolrConfigHandlerCloud.compareValues;
 
-//@Ignore // nocommit debug, perhaps timing? We add a config overlay and then don't find it
+@Ignore // nocommit debug, perhaps timing? We add a config overlay and then don't find it
 public class TestDynamicLoading extends AbstractFullDistribZkTestBase {
 
   @BeforeClass
diff --git a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
index d46838a..fcf0f18 100644
--- a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
+++ b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
@@ -99,7 +99,7 @@ public class TestLazyCores extends SolrTestCaseJ4 {
   }
   
   @Test
-  //@Ignore // nocommit harden
+  @Ignore // nocommit harden
   public void testLazyLoad() throws Exception {
     CoreContainer cc = init();
     try {
diff --git a/solr/core/src/test/org/apache/solr/handler/V2StandaloneTest.java b/solr/core/src/test/org/apache/solr/handler/V2StandaloneTest.java
index 9b9161b..636dbed 100644
--- a/solr/core/src/test/org/apache/solr/handler/V2StandaloneTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/V2StandaloneTest.java
@@ -20,14 +20,17 @@ package org.apache.solr.handler;
 import java.io.File;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.Http2SolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.V2Request;
 import org.apache.solr.common.util.NamedList;
+import org.junit.Ignore;
 import org.junit.Test;
 
+@LuceneTestCase.AwaitsFix(bugUrl = "http2 client does not follow redirects and 404's")
 public class V2StandaloneTest extends SolrTestCaseJ4{
 
   @Test
@@ -39,8 +42,8 @@ public class V2StandaloneTest extends SolrTestCaseJ4{
     JettySolrRunner jetty = new JettySolrRunner(solrHomeTmp.getAbsolutePath(), buildJettyConfig("/solr"));
     jetty.start();
 
-    try (Http2SolrClient client = getHttpSolrClient(buildUrl(jetty.getLocalPort(),"/solr/"))) {
-      NamedList res = client.request(new V2Request.Builder("/").build());
+    try (Http2SolrClient client = getHttpSolrClient(buildUrl(jetty.getLocalPort(),"/solr"))) {
+      NamedList res = client.request(new V2Request.Builder("").build());
       NamedList header = (NamedList) res.get("responseHeader");
       assertEquals(0, header.get("status"));
 
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/SplitHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/SplitHandlerTest.java
index dcfc749..e6f8568 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/SplitHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/SplitHandlerTest.java
@@ -227,6 +227,7 @@ public class SplitHandlerTest extends SolrTestCaseJ4 {
   }
 
   @Test
+  @Nightly // slow
   public void testHistogramBuilding() throws Exception {
     List<Prefix> prefixes = SplitByPrefixTest.findPrefixes(20, 0, 0x00ffffff);
     List<Prefix> uniquePrefixes = SplitByPrefixTest.removeDups(prefixes);
diff --git a/solr/core/src/test/org/apache/solr/request/TestRemoteStreaming.java b/solr/core/src/test/org/apache/solr/request/TestRemoteStreaming.java
index 26e76c8..896fab8 100644
--- a/solr/core/src/test/org/apache/solr/request/TestRemoteStreaming.java
+++ b/solr/core/src/test/org/apache/solr/request/TestRemoteStreaming.java
@@ -95,13 +95,10 @@ public class TestRemoteStreaming extends SolrJettyTestBase {
     Object obj = new URL(getUrl).getContent();
     if (obj instanceof InputStream) {
       InputStream inputStream = (InputStream) obj;
-      try {
-        StringWriter strWriter = new StringWriter();
-        IOUtils.copy(new InputStreamReader(inputStream, StandardCharsets.UTF_8),strWriter);
-        return strWriter.toString();
-      } finally {
-        IOUtils.closeQuietly(inputStream);
-      }
+
+      StringWriter strWriter = new StringWriter();
+      IOUtils.copy(new InputStreamReader(inputStream, StandardCharsets.UTF_8), strWriter);
+      return strWriter.toString();
     }
     return null;
   }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java
index 3064cac..dbb7596 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java
@@ -228,12 +228,12 @@ public class Http2SolrClient extends SolrClient {
       } else {
         log.debug("Create Http2SolrClient with HTTP/1.1 transport");
       }
-      SolrHttpClientTransportOverHTTP transport = new SolrHttpClientTransportOverHTTP(1);
+      SolrHttpClientTransportOverHTTP transport = new SolrHttpClientTransportOverHTTP(2);
       httpClient = new HttpClient(transport, sslContextFactory);
     } else {
       log.debug("Create Http2SolrClient with HTTP/2 transport");
       HTTP2Client http2client = new HTTP2Client();
-      http2client.setSelectors(1);
+      http2client.setSelectors(2);
       http2client.setIdleTimeout(idleTimeout);
       http2client.setMaxConcurrentPushedStreams(512);
       http2client.setInputBufferSize(16384);
@@ -254,7 +254,7 @@ public class Http2SolrClient extends SolrClient {
       httpClient.setConnectBlocking(false);
       httpClient.setFollowRedirects(false);
       if (builder.maxConnectionsPerHost != null) httpClient.setMaxConnectionsPerDestination(builder.maxConnectionsPerHost);
-      httpClient.setMaxRequestsQueuedPerDestination(100000);
+      httpClient.setMaxRequestsQueuedPerDestination(1024);
       httpClient.setUserAgentField(new HttpField(HttpHeader.USER_AGENT, AGENT));
       httpClient.setIdleTimeout(idleTimeout);
       httpClient.setTCPNoDelay(true);
@@ -577,7 +577,7 @@ public class Http2SolrClient extends SolrClient {
     RequestWriter.ContentWriter contentWriter = requestWriter.getContentWriter(solrRequest);
     Collection<ContentStream> streams = contentWriter == null ? requestWriter.getContentStreams(solrRequest) : null;
     String path = requestWriter.getPath(solrRequest);
-    if (path == null || !path.startsWith("/")) {
+    if (path == null) {
       path = DEFAULT_PATH;
     }
 
@@ -768,7 +768,7 @@ public class Http2SolrClient extends SolrClient {
           break;
         default:
           if (processor == null || mimeType == null) {
-            throw new RemoteSolrException(serverBaseUrl, httpStatus, "non ok status: " + httpStatus
+            throw new RemoteSolrException(response.getRequest().getURI().toString(), httpStatus, "non ok status: " + httpStatus
                 + ", message:" + response.getReason(),
                 null);
           }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java
index 5334edd..8f318c1 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java
@@ -151,7 +151,7 @@ public class V2Request extends SolrRequest<V2Response> implements MapWriter {
      * @param resource resource of the request for example "/collections" or "/cores/core-name"
      */
     public Builder(String resource) {
-      if (!resource.startsWith("/")) resource = "/" + resource;
+      if (!resource.startsWith("/") && !resource.equals("")) resource = "/" + resource;
       this.resource = resource;
     }
 
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCase.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCase.java
index 9537df8..6bc9218 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCase.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCase.java
@@ -197,7 +197,7 @@ public class SolrTestCase extends LuceneTestCase {
     testStartTime = System.nanoTime();
 
 
-    testExecutor = new PerThreadExecService(ParWork.getRootSharedExecutor(), 12, true, false);
+    testExecutor = new PerThreadExecService(ParWork.getRootSharedExecutor(), 60, true, false);
     ((PerThreadExecService) testExecutor).closeLock(true);
 
     interruptThreadsOnTearDown("RootExec", false);
diff --git a/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java b/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
index 9565e28..da8d1f2 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
@@ -336,12 +336,12 @@ public class TestHarness extends BaseTestHarness {
       }
       QueryResponseWriter responseWriter = core.getQueryResponseWriter(req);
       if (responseWriter instanceof BinaryQueryResponseWriter) {
-        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(256);
+        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(768);
         BinaryQueryResponseWriter writer = (BinaryQueryResponseWriter) responseWriter;
         writer.write(byteArrayOutputStream, req, rsp);
         return new String(byteArrayOutputStream.toByteArray(), StandardCharsets.UTF_8);
       } else {
-        StringWriter sw = new StringWriter(256);
+        StringWriter sw = new StringWriter(768);
         responseWriter.write(sw,req,rsp);
         return sw.toString();
       }