You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by no...@apache.org on 2020/06/22 13:49:32 UTC

[lucene-solr] branch jira/solr14586 created (now 85c7c7d)

This is an automated email from the ASF dual-hosted git repository.

noble pushed a change to branch jira/solr14586
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git.


      at 85c7c7d  SOLR-14586: replace the second function parameter in computeIfAbsent with static vars

This branch includes the following new commits:

     new 85c7c7d  SOLR-14586: replace the second function parameter in computeIfAbsent with static vars

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[lucene-solr] 01/01: SOLR-14586: replace the second function parameter in computeIfAbsent with static vars

Posted by no...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

noble pushed a commit to branch jira/solr14586
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 85c7c7d3f290bad897b457cdc0863ef971457370
Author: noble <no...@apache.org>
AuthorDate: Mon Jun 22 23:48:46 2020 +1000

    SOLR-14586: replace the second function parameter in computeIfAbsent with static vars
---
 .../solr/cloud/autoscaling/ComputePlanAction.java  | 16 ++---
 .../cloud/autoscaling/InactiveShardPlanAction.java |  8 +--
 .../solr/cloud/autoscaling/IndexSizeTrigger.java   | 15 +----
 .../solr/cloud/autoscaling/ScheduledTriggers.java  | 36 +++--------
 .../solr/cloud/autoscaling/SearchRateTrigger.java  | 32 ++++------
 .../cloud/autoscaling/sim/SimCloudManager.java     | 61 ++++--------------
 .../autoscaling/sim/SimClusterStateProvider.java   | 74 ++++------------------
 .../apache/solr/handler/RequestHandlerBase.java    | 16 +++--
 .../apache/solr/handler/admin/ClusterStatus.java   | 22 +------
 .../solr/handler/admin/IndexSizeEstimator.java     | 66 ++++++-------------
 .../solr/handler/admin/MetricsHistoryHandler.java  | 57 ++++-------------
 .../java/org/apache/solr/util/RedactionUtils.java  | 10 +--
 .../java/org/apache/solr/common/util/Utils.java    | 74 +++++++---------------
 13 files changed, 134 insertions(+), 353 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java
index 33bf6b0..8165595 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java
@@ -17,6 +17,12 @@
 
 package org.apache.solr.cloud.autoscaling;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.*;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.*;
@@ -28,17 +34,11 @@ import org.apache.solr.common.params.AutoScalingParams;
 import org.apache.solr.common.params.CollectionParams;
 import org.apache.solr.common.util.Pair;
 import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrResourceLoader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.*;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.function.Predicate;
-import java.util.stream.Collectors;
-
 import static org.apache.solr.cloud.autoscaling.TriggerEvent.NODE_NAMES;
 
 /**
@@ -247,7 +247,7 @@ public class ComputePlanAction extends TriggerActionBase {
         suggester = session.getSuggester(op.getAction());
         if (suggester instanceof UnsupportedSuggester) {
           @SuppressWarnings({"unchecked"})
-          List<TriggerEvent.Op> unsupportedOps = (List<TriggerEvent.Op>)context.getProperties().computeIfAbsent(TriggerEvent.UNSUPPORTED_OPS, k -> new ArrayList<TriggerEvent.Op>());
+          List<TriggerEvent.Op> unsupportedOps = (List<TriggerEvent.Op>)context.getProperties().computeIfAbsent(TriggerEvent.UNSUPPORTED_OPS, Utils.NEW_ARRAYLIST_FUN);
           unsupportedOps.add(op);
         }
         for (Map.Entry<Suggester.Hint, Object> e : op.getHints().entrySet()) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java
index d3de649..f2354a0 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java
@@ -17,14 +17,12 @@
 package org.apache.solr.cloud.autoscaling;
 
 import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
-
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
@@ -81,7 +79,7 @@ public class InactiveShardPlanAction extends TriggerActionBase {
     state.forEachCollection(coll ->
       coll.getSlices().forEach(s -> {
         if (Slice.State.INACTIVE.equals(s.getState())) {
-          inactive.computeIfAbsent(coll.getName(), c -> new ArrayList<>()).add(s.getName());
+          inactive.computeIfAbsent(coll.getName(), Utils.NEW_ARRAYLIST_FUN).add(s.getName());
           String tstampStr = s.getStr(ZkStateReader.STATE_TIMESTAMP_PROP);
           if (tstampStr == null || tstampStr.isEmpty()) {
             return;
@@ -98,9 +96,9 @@ public class InactiveShardPlanAction extends TriggerActionBase {
               log.debug("-- delete inactive {} / {}", coll.getName(), s.getName());
             }
             @SuppressWarnings({"unchecked", "rawtypes"})
-            List<SolrRequest> operations = (List<SolrRequest>)context.getProperties().computeIfAbsent("operations", k -> new ArrayList<>());
+            List<SolrRequest> operations = (List<SolrRequest>)context.getProperties().computeIfAbsent("operations", Utils.NEW_ARRAYLIST_FUN);
             operations.add(CollectionAdminRequest.deleteShard(coll.getName(), s.getName()));
-            cleanup.computeIfAbsent(coll.getName(), c -> new ArrayList<>()).add(s.getName());
+            cleanup.computeIfAbsent(coll.getName(), Utils.NEW_ARRAYLIST_FUN).add(s.getName());
           }
         }
         // check for stale shard split locks
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
index da40366..f052da3 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
@@ -19,19 +19,10 @@ package org.apache.solr.cloud.autoscaling;
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
+import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.Locale;
-
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
 import org.apache.solr.client.solrj.cloud.autoscaling.Suggester;
@@ -377,7 +368,7 @@ public class IndexSizeTrigger extends TriggerBase {
       if ((Long)info.getVariable(BYTES_SIZE_KEY) > aboveBytes ||
           (Long)info.getVariable(DOCS_SIZE_KEY) > aboveDocs) {
         if (waitForElapsed(coreName, now, lastAboveEventMap)) {
-          List<ReplicaInfo> infos = aboveSize.computeIfAbsent(info.getCollection(), c -> new ArrayList<>());
+          List<ReplicaInfo> infos = aboveSize.computeIfAbsent(info.getCollection(), Utils.NEW_ARRAYLIST_FUN);
           if (!infos.contains(info)) {
             if ((Long)info.getVariable(BYTES_SIZE_KEY) > aboveBytes) {
               info.getVariables().put(VIOLATION_KEY, ABOVE_BYTES_PROP);
@@ -403,7 +394,7 @@ public class IndexSizeTrigger extends TriggerBase {
           // make sure we don't produce conflicting ops
           !splittable.contains(info.getName())) {
         if (waitForElapsed(coreName, now, lastBelowEventMap)) {
-          List<ReplicaInfo> infos = belowSize.computeIfAbsent(info.getCollection(), c -> new ArrayList<>());
+          List<ReplicaInfo> infos = belowSize.computeIfAbsent(info.getCollection(), Utils.NEW_ARRAYLIST_FUN);
           if (!infos.contains(info)) {
             if ((Long)info.getVariable(BYTES_SIZE_KEY) < belowBytes) {
               info.getVariables().put(VIOLATION_KEY, BELOW_BYTES_PROP);
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java
index e080eec..17eb50c 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java
@@ -20,27 +20,12 @@ package org.apache.solr.cloud.autoscaling;
 import java.io.Closeable;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledFuture;
-import java.util.concurrent.ScheduledThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
+import java.util.*;
+import java.util.concurrent.*;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.stream.Collectors;
-
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
@@ -55,17 +40,14 @@ import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.IOUtils;
+import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrResourceLoader;
-import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.cloud.autoscaling.ExecutePlanAction.waitForTaskToFinish;
-import static org.apache.solr.common.params.AutoScalingParams.ACTION_THROTTLE_PERIOD_SECONDS;
-import static org.apache.solr.common.params.AutoScalingParams.TRIGGER_COOLDOWN_PERIOD_SECONDS;
-import static org.apache.solr.common.params.AutoScalingParams.TRIGGER_CORE_POOL_SIZE;
-import static org.apache.solr.common.params.AutoScalingParams.TRIGGER_SCHEDULE_DELAY_SECONDS;
+import static org.apache.solr.common.params.AutoScalingParams.*;
 import static org.apache.solr.common.util.ExecutorUtil.awaitTermination;
 
 /**
@@ -321,7 +303,7 @@ public class ScheduledTriggers implements Closeable {
               ActionContext actionContext = new ActionContext(cloudManager, newTrigger, new HashMap<>());
               for (TriggerAction action : actions) {
                 @SuppressWarnings({"unchecked"})
-                List<String> beforeActions = (List<String>) actionContext.getProperties().computeIfAbsent(TriggerEventProcessorStage.BEFORE_ACTION.toString(), k -> new ArrayList<String>());
+                List<String> beforeActions = (List<String>) actionContext.getProperties().computeIfAbsent(TriggerEventProcessorStage.BEFORE_ACTION.toString(), Utils.NEW_ARRAYLIST_FUN);
                 beforeActions.add(action.getName());
                 triggerListeners1.fireListeners(event.getSource(), event, TriggerEventProcessorStage.BEFORE_ACTION, action.getName(), actionContext);
                 try {
@@ -331,7 +313,7 @@ public class ScheduledTriggers implements Closeable {
                   throw new TriggerActionException(event.getSource(), action.getName(), "Error processing action for trigger event: " + event, e);
                 }
                 @SuppressWarnings({"unchecked"})
-                List<String> afterActions = (List<String>) actionContext.getProperties().computeIfAbsent(TriggerEventProcessorStage.AFTER_ACTION.toString(), k -> new ArrayList<String>());
+                List<String> afterActions = (List<String>) actionContext.getProperties().computeIfAbsent(TriggerEventProcessorStage.AFTER_ACTION.toString(), Utils.NEW_ARRAYLIST_FUN);
                 afterActions.add(action.getName());
                 triggerListeners1.fireListeners(event.getSource(), event, TriggerEventProcessorStage.AFTER_ACTION, action.getName(), actionContext);
               }
@@ -693,9 +675,9 @@ public class ScheduledTriggers implements Closeable {
                              Map<String, TriggerListener> listenersPerName) {
       this.listenersPerStage = new HashMap<>();
       listenersPerStage.forEach((n, listeners) -> {
-        Map<TriggerEventProcessorStage, List<TriggerListener>> perStage = this.listenersPerStage.computeIfAbsent(n, name -> new HashMap<>());
+        Map<TriggerEventProcessorStage, List<TriggerListener>> perStage = this.listenersPerStage.computeIfAbsent(n, Utils.NEW_HASHMAP_FUN);
         listeners.forEach((s, lst) -> {
-          List<TriggerListener> newLst = perStage.computeIfAbsent(s, stage -> new ArrayList<>());
+          List<TriggerListener> newLst = perStage.computeIfAbsent(s, Utils.NEW_ARRAYLIST_FUN);
           newLst.addAll(lst);
         });
       });
@@ -831,7 +813,7 @@ public class ScheduledTriggers implements Closeable {
 
     private void addPerStage(String triggerName, TriggerEventProcessorStage stage, TriggerListener listener) {
       Map<TriggerEventProcessorStage, List<TriggerListener>> perStage =
-          listenersPerStage.computeIfAbsent(triggerName, k -> new HashMap<>());
+          listenersPerStage.computeIfAbsent(triggerName, Utils.NEW_HASHMAP_FUN);
       List<TriggerListener> lst = perStage.computeIfAbsent(stage, k -> new ArrayList<>(3));
       lst.add(listener);
     }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/SearchRateTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/SearchRateTrigger.java
index efd5b24..8a8b970 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/SearchRateTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/SearchRateTrigger.java
@@ -16,25 +16,18 @@
  */
 package org.apache.solr.cloud.autoscaling;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.util.concurrent.AtomicDouble;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.util.concurrent.AtomicDouble;
+import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.Policy;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
-import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.Suggester;
 import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventType;
 import org.apache.solr.common.SolrException;
@@ -51,6 +44,9 @@ import org.apache.solr.metrics.SolrCoreMetricManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.util.Utils.NEW_ARRAYLIST_FUN;
+import static org.apache.solr.common.util.Utils.NEW_HASHMAP_FUN;
+
 /**
  * Trigger for the {@link org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventType#SEARCHRATE} event.
  */
@@ -350,7 +346,7 @@ public class SearchRateTrigger extends TriggerBase {
       // coll, shard, replica
       Map<String, Map<String, List<ReplicaInfo>>> infos = cloudManager.getNodeStateProvider().getReplicaInfo(node, Collections.emptyList());
       infos.forEach((coll, shards) -> {
-        Map<String, AtomicInteger> replPerShard = searchableReplicationFactors.computeIfAbsent(coll, c -> new HashMap<>());
+        Map<String, AtomicInteger> replPerShard = searchableReplicationFactors.computeIfAbsent(coll, NEW_HASHMAP_FUN);
         shards.forEach((sh, replicas) -> {
           AtomicInteger repl = replPerShard.computeIfAbsent(sh, s -> new AtomicInteger());
           replicas.forEach(replica -> {
@@ -383,8 +379,8 @@ public class SearchRateTrigger extends TriggerBase {
         if (info == null) {
           log.warn("Missing replica info for response tag {}", tag);
         } else {
-          Map<String, List<ReplicaInfo>> perCollection = collectionRates.computeIfAbsent(info.getCollection(), s -> new HashMap<>());
-          List<ReplicaInfo> perShard = perCollection.computeIfAbsent(info.getShard(), s -> new ArrayList<>());
+          Map<String, List<ReplicaInfo>> perCollection = collectionRates.computeIfAbsent(info.getCollection(), NEW_HASHMAP_FUN);
+          List<ReplicaInfo> perShard = perCollection.computeIfAbsent(info.getShard(), NEW_ARRAYLIST_FUN);
           info = (ReplicaInfo)info.clone();
           info.getVariables().put(AutoScalingParams.RATE, ((Number)rate).doubleValue());
           perShard.add(info);
@@ -458,11 +454,11 @@ public class SearchRateTrigger extends TriggerBase {
             (shard.equals(Policy.ANY) || shard.equals(sh))) {
           if (shardRate > aboveRate) {
             if (waitForElapsed(elapsedKey, now, lastShardEvent)) {
-              hotShards.computeIfAbsent(coll, s -> new HashMap<>()).put(sh, shardRate);
+              hotShards.computeIfAbsent(coll, NEW_HASHMAP_FUN).put(sh, shardRate);
             }
           } else if (shardRate < belowRate) {
             if (waitForElapsed(elapsedKey, now, lastShardEvent)) {
-              coldShards.computeIfAbsent(coll, s -> new HashMap<>()).put(sh, shardRate);
+              coldShards.computeIfAbsent(coll, NEW_HASHMAP_FUN).put(sh, shardRate);
               log.debug("-- coldShard waitFor elapsed {}", elapsedKey);
             } else {
               if (log.isDebugEnabled()) {
@@ -636,7 +632,7 @@ public class SearchRateTrigger extends TriggerBase {
     hotShards.forEach((coll, shards) -> shards.forEach((s, r) -> {
       List<Pair<String, String>> perShard = hints
           .computeIfAbsent(coll, c -> new HashMap<>())
-          .computeIfAbsent(s, sh -> new ArrayList<>());
+          .computeIfAbsent(s, NEW_ARRAYLIST_FUN);
       addReplicaHints(coll, s, r, searchableReplicationFactors.get(coll).get(s).get(), perShard);
       violations.add(HOT_SHARDS);
     }));
@@ -697,7 +693,7 @@ public class SearchRateTrigger extends TriggerBase {
     Map<String, Map<String, List<ReplicaInfo>>> byCollectionByShard = new HashMap<>();
     coldReplicas.forEach(ri -> {
       byCollectionByShard.computeIfAbsent(ri.getCollection(), c -> new HashMap<>())
-          .computeIfAbsent(ri.getShard(), s -> new ArrayList<>())
+          .computeIfAbsent(ri.getShard(), NEW_ARRAYLIST_FUN)
           .add(ri);
     });
     coldShards.forEach((coll, perShard) -> {
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
index 25624f4..f962aaf 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
@@ -17,33 +17,18 @@
 
 package org.apache.solr.cloud.autoscaling.sim;
 
+import com.codahale.metrics.jvm.ClassLoadingGaugeSet;
+import com.codahale.metrics.jvm.GarbageCollectorMetricSet;
+import com.codahale.metrics.jvm.MemoryUsageGaugeSet;
+import com.codahale.metrics.jvm.ThreadStatesGaugeSet;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentSkipListMap;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
+import java.util.*;
+import java.util.concurrent.*;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
-
-import com.codahale.metrics.jvm.ClassLoadingGaugeSet;
-import com.codahale.metrics.jvm.GarbageCollectorMetricSet;
-import com.codahale.metrics.jvm.MemoryUsageGaugeSet;
-import com.codahale.metrics.jvm.ThreadStatesGaugeSet;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrResponse;
@@ -56,12 +41,7 @@ import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
 import org.apache.solr.client.solrj.cloud.autoscaling.Variable;
 import org.apache.solr.client.solrj.impl.ClusterStateProvider;
-import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
-import org.apache.solr.client.solrj.request.CollectionAdminRequest;
-import org.apache.solr.client.solrj.request.QueryRequest;
-import org.apache.solr.client.solrj.request.RequestWriter;
-import org.apache.solr.client.solrj.request.UpdateRequest;
-import org.apache.solr.client.solrj.request.V2Request;
+import org.apache.solr.client.solrj.request.*;
 import org.apache.solr.client.solrj.response.RequestStatusState;
 import org.apache.solr.client.solrj.response.SolrResponseBase;
 import org.apache.solr.client.solrj.response.UpdateResponse;
@@ -75,37 +55,21 @@ import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.cloud.rule.ImplicitSnitch;
-import org.apache.solr.common.params.CollectionAdminParams;
-import org.apache.solr.common.params.CollectionParams;
-import org.apache.solr.common.params.CommonAdminParams;
-import org.apache.solr.common.params.CommonParams;
-import org.apache.solr.common.params.CoreAdminParams;
-import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.common.util.ContentStreamBase;
-import org.apache.solr.common.util.ExecutorUtil;
-import org.apache.solr.common.util.IOUtils;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.ObjectCache;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.common.util.TimeSource;
+import org.apache.solr.common.params.*;
+import org.apache.solr.common.util.*;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.handler.admin.MetricsHandler;
 import org.apache.solr.handler.admin.MetricsHistoryHandler;
-import org.apache.solr.metrics.AltBufferPoolMetricSet;
-import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.OperatingSystemMetricSet;
-import org.apache.solr.metrics.SolrMetricManager;
-import org.apache.solr.metrics.SolrMetricsContext;
+import org.apache.solr.metrics.*;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.solr.util.MockSearchableSolrClient;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler.REQUESTID;
+import static org.apache.solr.common.util.Utils.NEW_ATOMICLONG_FUN;
 
 /**
  * Simulated {@link SolrCloudManager}.
@@ -735,8 +699,7 @@ public class SimCloudManager implements SolrCloudManager {
   }
 
   private void incrementCount(String op) {
-    AtomicLong count = opCounts.computeIfAbsent(op, o -> new AtomicLong());
-    count.incrementAndGet();
+    opCounts.computeIfAbsent(op, NEW_ATOMICLONG_FUN).incrementAndGet();
   }
 
   /**
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
index 6943f2c..661364b 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
@@ -17,24 +17,10 @@
 
 package org.apache.solr.cloud.autoscaling.sim;
 
+import com.google.common.util.concurrent.AtomicDouble;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Random;
-import java.util.Set;
-import java.util.TreeMap;
+import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
@@ -44,20 +30,10 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.stream.Collectors;
-
-import com.google.common.util.concurrent.AtomicDouble;
 import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
-import org.apache.solr.client.solrj.cloud.autoscaling.AlreadyExistsException;
-import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
-import org.apache.solr.client.solrj.cloud.autoscaling.BadVersionException;
-import org.apache.solr.client.solrj.cloud.autoscaling.Policy;
-import org.apache.solr.client.solrj.cloud.autoscaling.PolicyHelper;
-import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
-import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventType;
-import org.apache.solr.client.solrj.cloud.autoscaling.Variable;
+import org.apache.solr.client.solrj.cloud.autoscaling.*;
 import org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type;
-import org.apache.solr.client.solrj.cloud.autoscaling.VersionedData;
 import org.apache.solr.client.solrj.impl.ClusterStateProvider;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.request.UpdateRequest;
@@ -66,34 +42,16 @@ import org.apache.solr.client.solrj.response.UpdateResponse;
 import org.apache.solr.cloud.ActionThrottle;
 import org.apache.solr.cloud.CloudUtil;
 import org.apache.solr.cloud.Overseer;
-import org.apache.solr.cloud.api.collections.AddReplicaCmd;
-import org.apache.solr.cloud.api.collections.Assign;
-import org.apache.solr.cloud.api.collections.CreateCollectionCmd;
-import org.apache.solr.cloud.api.collections.CreateShardCmd;
-import org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler;
-import org.apache.solr.cloud.api.collections.SplitShardCmd;
+import org.apache.solr.cloud.api.collections.*;
 import org.apache.solr.cloud.overseer.ClusterStateMutator;
 import org.apache.solr.cloud.overseer.CollectionMutator;
 import org.apache.solr.cloud.overseer.ZkWriteCommand;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.DocRouter;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.ReplicaPosition;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.ZkNodeProps;
-import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.cloud.*;
 import org.apache.solr.common.cloud.rule.ImplicitSnitch;
-import org.apache.solr.common.params.CollectionAdminParams;
-import org.apache.solr.common.params.CollectionParams;
-import org.apache.solr.common.params.CommonAdminParams;
-import org.apache.solr.common.params.CommonParams;
-import org.apache.solr.common.params.CoreAdminParams;
-import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.common.params.UpdateParams;
+import org.apache.solr.common.params.*;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrInfoBean;
@@ -104,16 +62,10 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
-import static org.apache.solr.common.cloud.ZkStateReader.NRT_REPLICAS;
-import static org.apache.solr.common.cloud.ZkStateReader.PULL_REPLICAS;
-import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
-import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.TLOG_REPLICAS;
+import static org.apache.solr.common.cloud.ZkStateReader.*;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.MODIFYCOLLECTION;
 import static org.apache.solr.common.params.CommonParams.NAME;
+import static org.apache.solr.common.util.Utils.NEW_HASHMAP_FUN;
 
 /**
  * Simulated {@link ClusterStateProvider}.
@@ -199,7 +151,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
                 props.put(ZkStateReader.STATE_PROP, ri.getState().toString());
                 Replica r = new Replica(ri.getName(), props, ri.getCollection(), ri.getShard());
                 collMap.computeIfAbsent(ri.getCollection(), c -> new HashMap<>())
-                    .computeIfAbsent(ri.getShard(), s -> new HashMap<>())
+                    .computeIfAbsent(ri.getShard(), NEW_HASHMAP_FUN)
                     .put(ri.getName(), r);
               });
             }
@@ -351,10 +303,10 @@ public class SimClusterStateProvider implements ClusterStateProvider {
         // DocCollection will be created later
         collectionsStatesRef.put(dc.getName(), new CachedCollectionRef(dc.getName(), dc.getZNodeVersion()));
         collProperties.computeIfAbsent(dc.getName(), name -> new ConcurrentHashMap<>()).putAll(dc.getProperties());
-        opDelays.computeIfAbsent(dc.getName(), Utils.NEW_HASHMAP_FUN).putAll(defaultOpDelays);
+        opDelays.computeIfAbsent(dc.getName(), NEW_HASHMAP_FUN).putAll(defaultOpDelays);
         dc.getSlices().forEach(s -> {
           sliceProperties.computeIfAbsent(dc.getName(), name -> new ConcurrentHashMap<>())
-              .computeIfAbsent(s.getName(), Utils.NEW_HASHMAP_FUN).putAll(s.getProperties());
+              .computeIfAbsent(s.getName(), NEW_HASHMAP_FUN).putAll(s.getProperties());
           Replica leader = s.getLeader();
           s.getReplicas().forEach(r -> {
             Map<String, Object> props = new HashMap<>(r.getProperties());
@@ -368,7 +320,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
             if (liveNodes.get().contains(r.getNodeName())) {
               nodeReplicaMap.computeIfAbsent(r.getNodeName(), Utils.NEW_SYNCHRONIZED_ARRAYLIST_FUN).add(ri);
               colShardReplicaMap.computeIfAbsent(ri.getCollection(), name -> new ConcurrentHashMap<>())
-                  .computeIfAbsent(ri.getShard(), shard -> new ArrayList<>()).add(ri);
+                  .computeIfAbsent(ri.getShard(), Utils.NEW_ARRAYLIST_FUN).add(ri);
             } else {
               log.warn("- dropping replica because its node {} is not live: {}", r.getNodeName(), r);
             }
@@ -2301,7 +2253,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     }
     // core_node_name is not unique across collections
     Map<String, Map<String, ReplicaInfo>> infoMap = new HashMap<>();
-    infos.forEach(ri -> infoMap.computeIfAbsent(ri.getCollection(), Utils.NEW_HASHMAP_FUN).put(ri.getName(), ri));
+    infos.forEach(ri -> infoMap.computeIfAbsent(ri.getCollection(), NEW_HASHMAP_FUN).put(ri.getName(), ri));
     source.forEach((coll, shards) -> shards.forEach((shard, replicas) -> replicas.forEach(r -> {
       ReplicaInfo target = infoMap.getOrDefault(coll, Collections.emptyMap()).get(r.getName());
       if (target == null) {
diff --git a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
index 1fcc183..92c18b8 100644
--- a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
+++ b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
@@ -16,15 +16,15 @@
  */
 package org.apache.solr.handler;
 
-import java.lang.invoke.MethodHandles;
-import java.util.Collection;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Meter;
 import com.codahale.metrics.Timer;
 import com.google.common.collect.ImmutableList;
+import java.lang.invoke.MethodHandles;
+import java.util.Collection;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Function;
 import org.apache.solr.api.Api;
 import org.apache.solr.api.ApiBag;
 import org.apache.solr.api.ApiSupport;
@@ -184,6 +184,8 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
 
   public abstract void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception;
 
+  public static final Function<String, Counter> NEW_COUNTER_FUN = s -> new Counter();
+
   @Override
   public void handleRequest(SolrQueryRequest req, SolrQueryResponse rsp) {
     requests.inc();
@@ -191,12 +193,12 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
     boolean distrib = req.getParams().getBool(CommonParams.DISTRIB,
         req.getCore() != null ? req.getCore().getCoreContainer().isZooKeeperAware() : false);
     if (req.getParams().getBool(ShardParams.IS_SHARD, false)) {
-      shardPurposes.computeIfAbsent("total", name -> new Counter()).inc();
+      shardPurposes.computeIfAbsent("total", NEW_COUNTER_FUN).inc();
       int purpose = req.getParams().getInt(ShardParams.SHARDS_PURPOSE, 0);
       if (purpose != 0) {
         String[] names = SolrPluginUtils.getRequestPurposeNames(purpose);
         for (String n : names) {
-          shardPurposes.computeIfAbsent(n, name -> new Counter()).inc();
+          shardPurposes.computeIfAbsent(n, NEW_COUNTER_FUN).inc();
         }
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java b/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java
index 2265c9b..d982632 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java
@@ -16,26 +16,10 @@
  */
 package org.apache.solr.handler.admin;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 import java.util.stream.Collectors;
-
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.cloud.Aliases;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.DocRouter;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.ZkNodeProps;
-import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.cloud.*;
 import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
@@ -65,7 +49,7 @@ public class ClusterStatus {
       List<String> colls = entry.getValue();
       for (String coll : colls) {
         if (collection == null || collection.equals(coll))  {
-          List<String> list = collectionVsAliases.computeIfAbsent(coll, k -> new ArrayList<>());
+          List<String> list = collectionVsAliases.computeIfAbsent(coll, Utils.NEW_ARRAYLIST_FUN);
           list.add(alias);
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/IndexSizeEstimator.java b/solr/core/src/java/org/apache/solr/handler/admin/IndexSizeEstimator.java
index 13fefa3..1ea545a 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/IndexSizeEstimator.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/IndexSizeEstimator.java
@@ -19,44 +19,17 @@ package org.apache.solr.handler.admin;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.nio.file.Paths;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Objects;
+import java.util.*;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.function.Function;
-
 import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
 import org.apache.lucene.codecs.StoredFieldsReader;
-import org.apache.lucene.index.BinaryDocValues;
-import org.apache.lucene.index.CodecReader;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.FieldInfos;
-import org.apache.lucene.index.Fields;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.NumericDocValues;
-import org.apache.lucene.index.PointValues;
-import org.apache.lucene.index.PostingsEnum;
-import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.index.SortedNumericDocValues;
-import org.apache.lucene.index.SortedSetDocValues;
-import org.apache.lucene.index.StandardDirectoryReader;
-import org.apache.lucene.index.StoredFieldVisitor;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.index.*;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.PriorityQueue;
-import org.apache.lucene.util.RamUsageEstimator;
-import org.apache.lucene.util.SuppressForbidden;
-import org.apache.lucene.util.UnicodeUtil;
+import org.apache.lucene.util.*;
 import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.util.Utils;
 import org.slf4j.Logger;
@@ -223,7 +196,7 @@ public class IndexSizeEstimator {
       Map<String, Object> perType = (Map<String, Object>)((Map<String, Object>)perField).get("perType");
       perType.forEach((type, size) -> {
         if (type.contains("_lengths")) {
-          AtomicLong totalSize = typeSizes.computeIfAbsent(type.replace("_lengths", ""), t -> new AtomicLong());
+          AtomicLong totalSize = typeSizes.computeIfAbsent(type.replace("_lengths", ""), Utils.NEW_ATOMICLONG_FUN);
           totalSize.addAndGet(((AtomicLong)size).get());
         }
       });
@@ -273,16 +246,16 @@ public class IndexSizeEstimator {
     log.info("- preparing summary...");
     details.forEach((type, perType) -> {
       ((Map<String, Object>)perType).forEach((field, perField) -> {
-        Map<String, Object> perFieldSummary = (Map<String, Object>)summary.computeIfAbsent(field, f -> new HashMap<>());
+        Map<String, Object> perFieldSummary = (Map<String, Object>)summary.computeIfAbsent(field,Utils.NEW_HASHMAP_FUN);
         ((Map<String, Object>)perField).forEach((k, val) -> {
           if (val instanceof SummaryStatistics) {
             SummaryStatistics stats = (SummaryStatistics)val;
             if (k.startsWith("lengths")) {
-              AtomicLong total = (AtomicLong)perFieldSummary.computeIfAbsent("totalSize", kt -> new AtomicLong());
+              AtomicLong total = (AtomicLong)perFieldSummary.computeIfAbsent("totalSize",Utils.NEW_ATOMICLONG_FUN);
               total.addAndGet((long)stats.getSum());
             }
-            Map<String, Object> perTypeSummary = (Map<String, Object>)perFieldSummary.computeIfAbsent("perType", pt -> new HashMap<>());
-            AtomicLong total = (AtomicLong)perTypeSummary.computeIfAbsent(type + "_" + k, t -> new AtomicLong());
+            Map<String, Object> perTypeSummary = (Map<String, Object>)perFieldSummary.computeIfAbsent("perType", Utils.NEW_HASHMAP_FUN);
+            AtomicLong total = (AtomicLong)perTypeSummary.computeIfAbsent(type + "_" + k, Utils.NEW_ATOMICLONG_FUN);
             total.addAndGet((long)stats.getSum());
           }
         });
@@ -301,8 +274,8 @@ public class IndexSizeEstimator {
         if (norms == null) {
           continue;
         }
-        Map<String, Object> perField = stats.computeIfAbsent(info.name, n -> new HashMap<>());
-        SummaryStatistics lengthSummary = (SummaryStatistics)perField.computeIfAbsent("lengths", s -> new MapWriterSummaryStatistics());
+        Map<String, Object> perField = stats.computeIfAbsent(info.name, Utils.NEW_HASHMAP_FUN);
+        SummaryStatistics lengthSummary = (SummaryStatistics)perField.computeIfAbsent("lengths", MapWriterSummaryStatistics.NEW_INST_FUN);
         while (norms.advance(norms.docID() + samplingStep) != DocIdSetIterator.NO_MORE_DOCS) {
           for (int i = 0; i < samplingStep; i++) {
             lengthSummary.addValue(8);
@@ -324,8 +297,8 @@ public class IndexSizeEstimator {
         if (values == null) {
           continue;
         }
-        Map<String, Object> perField = stats.computeIfAbsent(info.name, n -> new HashMap<>());
-        SummaryStatistics lengthSummary = (SummaryStatistics)perField.computeIfAbsent("lengths", s -> new MapWriterSummaryStatistics());
+        Map<String, Object> perField = stats.computeIfAbsent(info.name, Utils.NEW_HASHMAP_FUN);
+        SummaryStatistics lengthSummary = (SummaryStatistics)perField.computeIfAbsent("lengths", MapWriterSummaryStatistics.NEW_INST_FUN);
         lengthSummary.addValue(values.size() * values.getBytesPerDimension() * values.getNumIndexDimensions());
       }
     }
@@ -414,7 +387,7 @@ public class IndexSizeEstimator {
       return;
     }
     Map<String, Object> perField = stats.computeIfAbsent(field, n -> new HashMap<>());
-    SummaryStatistics lengthSummary = (SummaryStatistics)perField.computeIfAbsent("lengths_" + type, s -> new MapWriterSummaryStatistics());
+    SummaryStatistics lengthSummary = (SummaryStatistics)perField.computeIfAbsent("lengths_" + type, MapWriterSummaryStatistics.NEW_INST_FUN);
     while (values.advance(values.docID() + samplingStep) != DocIdSetIterator.NO_MORE_DOCS) {
       int len = valueLength.apply(values);
       for (int i = 0; i < samplingStep; i++) {
@@ -442,14 +415,14 @@ public class IndexSizeEstimator {
 
   private void estimateTermStats(String field, Terms terms, Map<String, Map<String, Object>> stats, boolean isSampling) throws IOException {
     Map<String, Object> perField = stats.computeIfAbsent(field, n -> new HashMap<>());
-    SummaryStatistics lengthSummary = (SummaryStatistics)perField.computeIfAbsent("lengths_terms", s -> new MapWriterSummaryStatistics());
-    SummaryStatistics docFreqSummary = (SummaryStatistics)perField.computeIfAbsent("docFreqs", s -> new MapWriterSummaryStatistics());
-    SummaryStatistics totalFreqSummary = (SummaryStatistics)perField.computeIfAbsent("lengths_postings", s -> new MapWriterSummaryStatistics());
+    SummaryStatistics lengthSummary = (SummaryStatistics)perField.computeIfAbsent("lengths_terms", MapWriterSummaryStatistics.NEW_INST_FUN);
+    SummaryStatistics docFreqSummary = (SummaryStatistics)perField.computeIfAbsent("docFreqs", MapWriterSummaryStatistics.NEW_INST_FUN);
+    SummaryStatistics totalFreqSummary = (SummaryStatistics)perField.computeIfAbsent("lengths_postings", MapWriterSummaryStatistics.NEW_INST_FUN);
     // TODO: add this at some point
     //SummaryStatistics impactsSummary = (SummaryStatistics)perField.computeIfAbsent("lengths_impacts", s -> new MapWriterSummaryStatistics());
     SummaryStatistics payloadSummary = null;
     if (terms.hasPayloads()) {
-      payloadSummary = (SummaryStatistics)perField.computeIfAbsent("lengths_payloads", s -> new MapWriterSummaryStatistics());
+      payloadSummary = (SummaryStatistics)perField.computeIfAbsent("lengths_payloads",MapWriterSummaryStatistics.NEW_INST_FUN);
     }
     ItemPriorityQueue topLen = (ItemPriorityQueue)perField.computeIfAbsent("topLen", s -> new ItemPriorityQueue(topN));
     ItemPriorityQueue topTotalFreq = (ItemPriorityQueue)perField.computeIfAbsent("topTotalFreq", s -> new ItemPriorityQueue(topN));
@@ -547,6 +520,7 @@ public class IndexSizeEstimator {
   }
 
   public static class MapWriterSummaryStatistics extends SummaryStatistics implements MapWriter {
+    public static final  Function<String, Object> NEW_INST_FUN = s -> new MapWriterSummaryStatistics();
 
     @Override
     public void writeMap(EntryWriter ew) throws IOException {
@@ -662,8 +636,8 @@ public class IndexSizeEstimator {
     }
 
     private void countItem(String field, Object value, int size) {
-      Map<String, Object> perField = stats.computeIfAbsent(field, n -> new HashMap<>());
-      SummaryStatistics summary = (SummaryStatistics)perField.computeIfAbsent("lengths", s -> new MapWriterSummaryStatistics());
+      Map<String, Object> perField = stats.computeIfAbsent(field, Utils.NEW_HASHMAP_FUN);
+      SummaryStatistics summary = (SummaryStatistics)perField.computeIfAbsent("lengths", MapWriterSummaryStatistics.NEW_INST_FUN);
       for (int i = 0; i < samplingStep; i++) {
         summary.addValue(size);
       }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java
index 5c475a1..bfc606b 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java
@@ -16,8 +16,11 @@
  */
 package org.apache.solr.handler.admin;
 
+import static java.util.stream.Collectors.toMap;
 import javax.imageio.ImageIO;
-import java.awt.Color;
+
+import com.google.common.annotations.VisibleForTesting;
+import java.awt.*;
 import java.awt.image.BufferedImage;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -27,20 +30,8 @@ import java.lang.invoke.MethodHandles;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URL;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
 import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Set;
-import java.util.TimeZone;
+import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
@@ -51,8 +42,6 @@ import java.util.concurrent.atomic.DoubleAdder;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.solr.api.Api;
 import org.apache.solr.api.ApiBag;
 import org.apache.solr.client.solrj.SolrClient;
@@ -67,23 +56,13 @@ import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.cloud.LeaderElector;
 import org.apache.solr.cloud.Overseer;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.*;
 import org.apache.solr.common.params.CollectionAdminParams;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.Base64;
-import org.apache.solr.common.util.ExecutorUtil;
-import org.apache.solr.common.util.JavaBinCodec;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.Pair;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.common.util.TimeSource;
-import org.apache.solr.common.util.Utils;
+import org.apache.solr.common.util.*;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.rrd.SolrRrdBackendFactory;
@@ -91,25 +70,15 @@ import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.security.AuthorizationContext;
 import org.apache.solr.security.PermissionNameProvider;
-import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.zookeeper.KeeperException;
 import org.rrd4j.ConsolFun;
 import org.rrd4j.DsType;
-import org.rrd4j.core.ArcDef;
-import org.rrd4j.core.Archive;
-import org.rrd4j.core.Datasource;
-import org.rrd4j.core.DsDef;
-import org.rrd4j.core.FetchData;
-import org.rrd4j.core.FetchRequest;
-import org.rrd4j.core.RrdDb;
-import org.rrd4j.core.RrdDef;
-import org.rrd4j.core.Sample;
+import org.rrd4j.core.*;
 import org.rrd4j.graph.RrdGraph;
 import org.rrd4j.graph.RrdGraphDef;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static java.util.stream.Collectors.toMap;
 import static org.apache.solr.common.params.CommonParams.ID;
 
 /**
@@ -493,8 +462,8 @@ public class MetricsHistoryHandler extends RequestHandlerBase implements Permiss
         shards.forEach((sh, replicas) -> {
           String registry = SolrMetricManager.getRegistryName(Group.collection, coll);
           Map<String, Number> perReg = totals
-              .computeIfAbsent(Group.collection, g -> new HashMap<>())
-              .computeIfAbsent(registry, r -> new HashMap<>());
+              .computeIfAbsent(Group.collection, ( Function<Group, Map<String, Map<String, Number>>>)Utils.NEW_HASHMAP_FUN)
+              .computeIfAbsent(registry, Utils.NEW_HASHMAP_FUN);
           replicas.forEach(ri -> {
             collTags.forEach(tag -> {
               double value = ((Number)ri.getVariable(tag, 0.0)).doubleValue();
@@ -510,7 +479,7 @@ public class MetricsHistoryHandler extends RequestHandlerBase implements Permiss
         String registry = SolrMetricManager.getRegistryName(g);
         Map<String, Number> perReg = totals
             .computeIfAbsent(g, gr -> new HashMap<>())
-            .computeIfAbsent(registry, r -> new HashMap<>());
+            .computeIfAbsent(registry, Utils.NEW_HASHMAP_FUN);
         Set<String> names = new HashSet<>();
         names.addAll(counters.get(g.toString()));
         names.addAll(gauges.get(g.toString()));
@@ -527,7 +496,7 @@ public class MetricsHistoryHandler extends RequestHandlerBase implements Permiss
     String nodeReg = SolrMetricManager.getRegistryName(Group.node);
     Map<String, Number> perNodeReg = totals
         .computeIfAbsent(Group.node, gr -> new HashMap<>())
-        .computeIfAbsent(nodeReg, r -> new HashMap<>());
+        .computeIfAbsent(nodeReg, Utils.NEW_HASHMAP_FUN);
     perNodeReg.put(NUM_NODES_KEY, nodes.size());
 
     // add some global collection-level stats
@@ -537,7 +506,7 @@ public class MetricsHistoryHandler extends RequestHandlerBase implements Permiss
         String registry = SolrMetricManager.getRegistryName(Group.collection, coll.getName());
         Map<String, Number> perReg = totals
             .computeIfAbsent(Group.collection, g -> new HashMap<>())
-            .computeIfAbsent(registry, r -> new HashMap<>());
+            .computeIfAbsent(registry, Utils.NEW_HASHMAP_FUN);
         Slice[] slices = coll.getActiveSlicesArr();
         perReg.put(NUM_SHARDS_KEY, slices.length);
         DoubleAdder numActiveReplicas = new DoubleAdder();
diff --git a/solr/core/src/java/org/apache/solr/util/RedactionUtils.java b/solr/core/src/java/org/apache/solr/util/RedactionUtils.java
index 56909f4..5e89927 100644
--- a/solr/core/src/java/org/apache/solr/util/RedactionUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/RedactionUtils.java
@@ -17,13 +17,9 @@
 
 package org.apache.solr.util;
 
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
+import java.util.*;
 import java.util.regex.Pattern;
+import org.apache.solr.common.util.Utils;
 
 public class RedactionUtils {
   public static final String SOLR_REDACTION_SYSTEM_PATTERN_PROP = "solr.redaction.system.pattern";
@@ -75,7 +71,7 @@ public class RedactionUtils {
       }
       int codeSpace = codeSpaces.computeIfAbsent(redactionPrefix, p -> 4);
       int code = Math.abs(name.hashCode() % codeSpace);
-      Set<Integer> uniqueCode = uniqueCodes.computeIfAbsent(redactionPrefix, p -> new HashSet<>());
+      Set<Integer> uniqueCode = uniqueCodes.computeIfAbsent(redactionPrefix, Utils.NEW_HASHSET_FUN);
       while (uniqueCode.contains(code)) {
         codeSpace = codeSpace << 1;
         codeSpaces.put(redactionPrefix, codeSpace);
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
index 229417a..357d5a9 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
@@ -16,6 +16,26 @@
  */
 package org.apache.solr.common.util;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static java.util.Collections.*;
+import static java.util.concurrent.TimeUnit.NANOSECONDS;
+
+import java.io.*;
+import java.lang.invoke.MethodHandles;
+import java.net.URL;
+import java.net.URLDecoder;
+import java.nio.BufferOverflowException;
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.function.BiConsumer;
+import java.util.function.Function;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 import org.apache.http.HttpEntity;
 import org.apache.http.HttpResponse;
 import org.apache.http.client.HttpClient;
@@ -24,12 +44,7 @@ import org.apache.http.util.EntityUtils;
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.VersionedData;
 import org.apache.solr.client.solrj.impl.BinaryRequestWriter;
-import org.apache.solr.common.IteratorWriter;
-import org.apache.solr.common.LinkedHashMapWriter;
-import org.apache.solr.common.MapWriter;
-import org.apache.solr.common.MapWriterMap;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SpecProvider;
+import org.apache.solr.common.*;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkOperation;
 import org.apache.solr.common.cloud.ZkStateReader;
@@ -44,52 +59,11 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.MDC;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.Reader;
-import java.io.StringReader;
-import java.io.Writer;
-import java.lang.invoke.MethodHandles;
-import java.net.URL;
-import java.net.URLDecoder;
-import java.nio.BufferOverflowException;
-import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
-import java.util.AbstractMap;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Objects;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.TreeSet;
-import java.util.concurrent.Callable;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.function.BiConsumer;
-import java.util.function.Function;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import static java.nio.charset.StandardCharsets.UTF_8;
-import static java.util.Collections.singletonList;
-import static java.util.Collections.unmodifiableList;
-import static java.util.Collections.unmodifiableSet;
-import static java.util.concurrent.TimeUnit.NANOSECONDS;
-
 public class Utils {
   @SuppressWarnings({"rawtypes"})
+  public static final Function NEW_CONCURRENT_HASHMAP_FUN = o -> new ConcurrentHashMap<>();
+
+  @SuppressWarnings({"rawtypes"})
   public static final Function NEW_HASHMAP_FUN = o -> new HashMap<>();
   @SuppressWarnings({"rawtypes"})
   public static final Function NEW_LINKED_HASHMAP_FUN = o -> new LinkedHashMap<>();