Posted to commits@lucene.apache.org by no...@apache.org on 2020/09/10 11:40:03 UTC

[lucene-solr] 01/01: propegate() to propagate()

This is an automated email from the ASF dual-hosted git repository.

noble pushed a commit to branch ref_impl_typo
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 78759c22ee6647fa4312f4f578378d4f5945f253
Author: noblepaul <no...@gmail.com>
AuthorDate: Thu Sep 10 21:37:51 2020 +1000

    propegate() to propagate()
---
 .../stream/AnalyticsShardRequestManager.java       |   4 +-
 .../src/java/org/apache/solr/api/AnnotatedApi.java |   3 +-
 solr/core/src/java/org/apache/solr/api/ApiBag.java |   4 +-
 .../src/java/org/apache/solr/api/V2HttpCall.java   |   6 +-
 .../client/solrj/embedded/EmbeddedSolrServer.java  |  10 +-
 .../client/solrj/embedded/JettySolrRunner.java     |  72 ++++-----
 .../java/org/apache/solr/cloud/ActionThrottle.java |   3 +-
 .../apache/solr/cloud/CloudConfigSetService.java   |   6 +-
 .../src/java/org/apache/solr/cloud/CloudUtil.java  |   4 +-
 .../java/org/apache/solr/cloud/LeaderElector.java  |  22 +--
 .../src/java/org/apache/solr/cloud/Overseer.java   |  49 +++---
 .../cloud/OverseerConfigSetMessageHandler.java     |  26 ++--
 .../apache/solr/cloud/OverseerElectionContext.java |   6 +-
 .../apache/solr/cloud/OverseerTaskProcessor.java   |  36 +++--
 .../org/apache/solr/cloud/OverseerTaskQueue.java   |  29 ++--
 .../solr/cloud/RecoveringCoreTermWatcher.java      |   2 +-
 .../org/apache/solr/cloud/RecoveryStrategy.java    |  68 ++++-----
 .../org/apache/solr/cloud/ReplicateFromLeader.java |   4 +-
 .../solr/cloud/ShardLeaderElectionContext.java     |  12 +-
 .../solr/cloud/ShardLeaderElectionContextBase.java |  12 +-
 .../java/org/apache/solr/cloud/SolrZkServer.java   |  22 +--
 .../java/org/apache/solr/cloud/SyncStrategy.java   |  15 +-
 .../java/org/apache/solr/cloud/ZkController.java   | 169 +++++++++------------
 .../org/apache/solr/cloud/ZkDistributedQueue.java  |  36 ++---
 .../java/org/apache/solr/cloud/ZkShardTerms.java   |  22 ++-
 .../apache/solr/cloud/ZkSolrResourceLoader.java    |  14 +-
 .../solr/cloud/api/collections/AddReplicaCmd.java  |  36 +++--
 .../apache/solr/cloud/api/collections/Assign.java  |  39 ++---
 .../cloud/api/collections/CreateCollectionCmd.java |  40 ++---
 .../solr/cloud/api/collections/DeleteNodeCmd.java  |   4 +-
 .../cloud/api/collections/DeleteReplicaCmd.java    |  17 ++-
 .../solr/cloud/api/collections/DeleteShardCmd.java |  18 +--
 .../api/collections/MaintainRoutedAliasCmd.java    |   2 +-
 .../solr/cloud/api/collections/MigrateCmd.java     |   4 +-
 .../solr/cloud/api/collections/MoveReplicaCmd.java |   4 +-
 .../OverseerCollectionMessageHandler.java          |  84 +++-------
 .../api/collections/ReindexCollectionCmd.java      |   8 +-
 .../solr/cloud/api/collections/ReplaceNodeCmd.java |   2 +-
 .../solr/cloud/api/collections/RoutedAlias.java    |   6 +-
 .../solr/cloud/api/collections/SplitShardCmd.java  |  16 +-
 .../cloud/api/collections/TimeRoutedAlias.java     |   4 +-
 .../solr/cloud/autoscaling/AutoScalingHandler.java |  14 +-
 .../solr/cloud/autoscaling/ComputePlanAction.java  |  29 ++--
 .../solr/cloud/autoscaling/ExecutePlanAction.java  |  14 +-
 .../autoscaling/InactiveMarkersPlanAction.java     |  25 +--
 .../cloud/autoscaling/InactiveShardPlanAction.java |   5 +-
 .../solr/cloud/autoscaling/IndexSizeTrigger.java   |  16 +-
 .../solr/cloud/autoscaling/NodeAddedTrigger.java   |   4 +-
 .../solr/cloud/autoscaling/NodeLostTrigger.java    |   6 +-
 .../cloud/autoscaling/OverseerTriggerThread.java   |  41 ++---
 .../solr/cloud/autoscaling/ScheduledTrigger.java   |   2 +-
 .../solr/cloud/autoscaling/ScheduledTriggers.java  |  35 +++--
 .../solr/cloud/autoscaling/SearchRateTrigger.java  |  16 +-
 .../solr/cloud/autoscaling/SystemLogListener.java  |   4 +-
 .../apache/solr/cloud/autoscaling/TriggerBase.java |  32 ++--
 .../solr/cloud/autoscaling/TriggerEventQueue.java  |  16 +-
 .../autoscaling/sim/GenericDistributedQueue.java   |   4 +-
 .../cloud/autoscaling/sim/SimCloudManager.java     |  20 +--
 .../autoscaling/sim/SimClusterStateProvider.java   |  87 +++++------
 .../autoscaling/sim/SimDistribStateManager.java    |   8 +-
 .../autoscaling/sim/SimNodeStateProvider.java      |   2 +-
 .../solr/cloud/autoscaling/sim/SimScenario.java    |  62 ++++----
 .../solr/cloud/overseer/CollectionMutator.java     |   2 +-
 .../apache/solr/cloud/overseer/OverseerAction.java |   6 +-
 .../apache/solr/cloud/overseer/ReplicaMutator.java |   8 +-
 .../apache/solr/cloud/overseer/ZkStateWriter.java  |  38 ++---
 .../apache/solr/cloud/rule/ReplicaAssigner.java    |   6 +-
 .../src/java/org/apache/solr/cloud/rule/Rule.java  |   2 +-
 .../java/org/apache/solr/core/BlobRepository.java  |   4 +-
 .../apache/solr/core/CachingDirectoryFactory.java  |  18 +--
 .../org/apache/solr/core/ConfigSetProperties.java  |   2 +-
 .../org/apache/solr/core/ConfigSetService.java     |  16 +-
 .../java/org/apache/solr/core/CoreContainer.java   |  33 ++--
 .../src/java/org/apache/solr/core/Diagnostics.java |   9 +-
 .../org/apache/solr/core/DirectoryFactory.java     |   3 +-
 .../org/apache/solr/core/HdfsDirectoryFactory.java |   2 +-
 .../java/org/apache/solr/core/MemClassLoader.java  |  20 +--
 .../src/java/org/apache/solr/core/PluginBag.java   |   9 +-
 .../org/apache/solr/core/QuerySenderListener.java  |   2 +-
 .../java/org/apache/solr/core/RequestParams.java   |   2 +-
 .../src/java/org/apache/solr/core/SolrConfig.java  |   9 +-
 .../src/java/org/apache/solr/core/SolrCore.java    |  54 ++++---
 .../src/java/org/apache/solr/core/SolrCores.java   |  24 +--
 .../org/apache/solr/core/SolrDeletionPolicy.java   |   2 +-
 .../src/java/org/apache/solr/core/SolrPaths.java   |   2 +-
 .../org/apache/solr/core/SolrResourceLoader.java   |  66 ++++----
 .../java/org/apache/solr/core/SolrXmlConfig.java   |  42 ++---
 .../solr/core/TransientSolrCoreCacheFactory.java   |   2 +-
 .../java/org/apache/solr/core/XmlConfigFile.java   |  42 ++---
 .../src/java/org/apache/solr/core/ZkContainer.java |   6 +-
 .../snapshots/SolrSnapshotMetaDataManager.java     |   2 +-
 .../solr/core/snapshots/SolrSnapshotsTool.java     |  17 +--
 .../apache/solr/filestore/DistribPackageStore.java |  16 +-
 .../org/apache/solr/filestore/PackageStoreAPI.java |  14 +-
 .../java/org/apache/solr/handler/CdcrParams.java   |  10 +-
 .../org/apache/solr/handler/CdcrReplicator.java    |   2 +-
 .../apache/solr/handler/CdcrReplicatorManager.java |  16 +-
 .../solr/handler/CdcrReplicatorScheduler.java      |  12 +-
 .../apache/solr/handler/CdcrRequestHandler.java    |  10 +-
 .../solr/handler/CdcrUpdateLogSynchronizer.java    |   4 +-
 .../handler/DocumentAnalysisRequestHandler.java    |  25 ++-
 .../org/apache/solr/handler/ExportHandler.java     |   2 +-
 .../java/org/apache/solr/handler/GraphHandler.java |   2 +-
 .../java/org/apache/solr/handler/IndexFetcher.java |  25 ++-
 .../apache/solr/handler/MoreLikeThisHandler.java   |   4 +-
 .../apache/solr/handler/ReplicationHandler.java    |  22 +--
 .../apache/solr/handler/RequestHandlerBase.java    |   5 +-
 .../java/org/apache/solr/handler/RestoreCore.java  |   6 +-
 .../org/apache/solr/handler/SchemaHandler.java     |   2 +-
 .../java/org/apache/solr/handler/SnapShooter.java  |   4 +-
 .../org/apache/solr/handler/SolrConfigHandler.java |  51 ++++---
 .../org/apache/solr/handler/StreamHandler.java     |   3 +-
 .../solr/handler/UpdateRequestHandlerApi.java      |   4 +-
 .../handler/admin/AutoscalingHistoryHandler.java   |   2 +-
 .../apache/solr/handler/admin/BackupCoreOp.java    |   2 +-
 .../solr/handler/admin/BaseHandlerApiSupport.java  |   2 +-
 .../solr/handler/admin/CollectionHandlerApi.java   |   2 +-
 .../solr/handler/admin/CollectionsHandler.java     |  92 ++++-------
 .../solr/handler/admin/CoreAdminHandler.java       |   2 +-
 .../solr/handler/admin/CoreAdminOperation.java     |   2 +-
 .../solr/handler/admin/LukeRequestHandler.java     |   2 +-
 .../apache/solr/handler/admin/MergeIndexesOp.java  |   2 +-
 .../solr/handler/admin/MetricsHistoryHandler.java  |   6 +-
 .../solr/handler/admin/RequestApplyUpdatesOp.java  |   2 +-
 .../solr/handler/admin/ZookeeperInfoHandler.java   |   6 +-
 .../solr/handler/component/CloudReplicaSource.java |   2 +-
 .../solr/handler/component/HttpShardHandler.java   |   6 +-
 .../org/apache/solr/metrics/SolrMetricManager.java |  50 +++---
 .../reporters/solr/SolrClusterReporter.java        |   2 +-
 .../src/java/org/apache/solr/pkg/PackageAPI.java   |   8 +-
 .../request/PerSegmentSingleValuedFaceting.java    |   2 +-
 .../java/org/apache/solr/request/SimpleFacets.java |  37 ++---
 .../solr/rest/schema/FieldTypeXmlAdapter.java      |  16 +-
 .../org/apache/solr/schema/ManagedIndexSchema.java |   3 +-
 .../solr/schema/ManagedIndexSchemaFactory.java     |  18 +--
 .../apache/solr/schema/ZkIndexSchemaReader.java    |   2 +-
 .../org/apache/solr/search/SolrIndexSearcher.java  |   4 +-
 .../org/apache/solr/search/ValueSourceParser.java  |   2 +-
 .../apache/solr/security/AuditLoggerPlugin.java    |  38 ++---
 .../security/DelegationTokenKerberosFilter.java    |  16 +-
 .../apache/solr/servlet/SolrDispatchFilter.java    |  69 ++++-----
 .../apache/solr/spelling/suggest/Suggester.java    |   2 +-
 .../org/apache/solr/store/hdfs/HdfsDirectory.java  |   2 +-
 .../apache/solr/store/hdfs/HdfsLockFactory.java    |   2 +-
 .../apache/solr/update/DefaultSolrCoreState.java   |  41 +++--
 .../apache/solr/update/DirectUpdateHandler2.java   |   2 +-
 .../java/org/apache/solr/update/HdfsUpdateLog.java |   2 +-
 .../org/apache/solr/update/SolrCmdDistributor.java |  28 ++--
 .../org/apache/solr/update/SolrIndexWriter.java    |   9 +-
 .../org/apache/solr/update/TimedVersionBucket.java |   4 +-
 .../src/java/org/apache/solr/update/UpdateLog.java |  65 ++++----
 .../java/org/apache/solr/update/VersionBucket.java |   6 +-
 .../processor/DistributedUpdateProcessor.java      |   4 +-
 .../processor/DistributedZkUpdateProcessor.java    |  16 +-
 .../org/apache/solr/util/ConcurrentLRUCache.java   |  18 +--
 .../src/java/org/apache/solr/util/CryptoKeys.java  |  12 +-
 .../src/java/org/apache/solr/util/ExportTool.java  |  11 +-
 .../src/java/org/apache/solr/util/FSHDFSUtils.java |   2 +-
 .../src/java/org/apache/solr/util/FileUtils.java   |   2 +-
 .../src/java/org/apache/solr/util/PackageTool.java |   8 +-
 .../java/org/apache/solr/util/SimplePostTool.java  |   8 +-
 .../src/java/org/apache/solr/util/SolrCLI.java     |  78 +++++-----
 .../java/org/apache/solr/util/SolrLogPostTool.java |  17 ++-
 .../java/org/apache/solr/util/SolrPluginUtils.java |   5 +-
 .../java/org/apache/solr/util/SpatialUtils.java    |   6 +-
 .../org/apache/solr/util/StartupLoggingUtils.java  |   6 +-
 .../java/org/apache/solr/util/TestInjection.java   |  20 +--
 .../java/org/apache/solr/util/VersionedFile.java   |   6 +-
 .../solr/util/plugin/AbstractPluginLoader.java     |  13 +-
 ...rumentedPoolingHttpClientConnectionManager.java |   2 +-
 .../org/apache/solr/util/stats/MetricUtils.java    |   2 +-
 .../apache/solr/util/xslt/TransformerProvider.java |  21 ++-
 .../client/solrj/impl/ConnectionReuseTest.java     |  14 +-
 .../org/apache/solr/cloud/TestConfigSetsAPI.java   |  10 +-
 .../org/apache/solr/cloud/ZkShardTermsTest.java    |   6 +-
 .../org/apache/solr/cloud/ZkSolrClientTest.java    |  13 +-
 .../org/apache/solr/handler/TestBlobHandler.java   |   9 +-
 .../handler/TestSolrConfigHandlerConcurrent.java   |   7 +-
 .../metrics/reporters/SolrJmxReporterTest.java     |   3 +-
 .../org/apache/solr/client/solrj/SolrResponse.java |  18 +--
 .../client/solrj/beans/DocumentObjectBinder.java   |  39 +++--
 .../solr/client/solrj/cloud/SocketProxy.java       |  21 ++-
 .../apache/solr/client/solrj/cloud/ZNodeName.java  |   2 +-
 .../solrj/cloud/autoscaling/AutoScalingConfig.java |   6 +-
 .../client/solrj/cloud/autoscaling/Clause.java     |   2 +-
 .../client/solrj/cloud/autoscaling/Policy.java     |   4 +-
 .../solrj/cloud/autoscaling/PolicyHelper.java      |   6 +-
 .../solrj/cloud/autoscaling/VariableBase.java      |   2 +-
 .../client/solrj/impl/AsyncLBHttpSolrClient.java   |  12 +-
 .../client/solrj/impl/BaseCloudSolrClient.java     |  72 +++++----
 .../solrj/impl/BaseHttpClusterStateProvider.java   |   4 +-
 .../client/solrj/impl/CloudHttp2SolrClient.java    |   2 +-
 .../solr/client/solrj/impl/CloudSolrClient.java    |   2 +-
 .../impl/ConcurrentUpdateHttp2SolrClient.java      |  38 ++---
 .../solr/client/solrj/impl/Http2SolrClient.java    |  68 ++++-----
 .../solr/client/solrj/impl/HttpClientUtil.java     |  24 +--
 .../solr/client/solrj/impl/HttpSolrClient.java     |  53 +++----
 .../solr/client/solrj/impl/LBSolrClient.java       |  51 ++++---
 .../solrj/impl/SolrClientNodeStateProvider.java    |   4 +-
 .../client/solrj/impl/ZkDistribStateManager.java   |   4 +-
 .../solrj/io/eval/OLSRegressionEvaluator.java      |   4 +-
 .../client/solrj/io/graph/GatherNodesStream.java   |   4 +-
 .../client/solrj/io/graph/ShortestPathStream.java  |  17 ++-
 .../solr/client/solrj/io/sql/StatementImpl.java    |   2 +-
 .../client/solrj/io/stream/CloudSolrStream.java    |   4 +-
 .../solr/client/solrj/io/stream/DaemonStream.java  |   8 +-
 .../client/solrj/io/stream/DeepRandomStream.java   |   6 +-
 .../client/solrj/io/stream/ExceptionStream.java    |   4 +-
 .../client/solrj/io/stream/ExecutorStream.java     |   8 +-
 .../solr/client/solrj/io/stream/Facet2DStream.java |   2 +-
 .../solr/client/solrj/io/stream/FacetStream.java   |   2 +-
 .../solrj/io/stream/FeaturesSelectionStream.java   |   2 +-
 .../solr/client/solrj/io/stream/JDBCStream.java    |   2 +-
 .../solr/client/solrj/io/stream/KnnStream.java     |   3 +-
 .../client/solrj/io/stream/ParallelListStream.java |   2 +-
 .../client/solrj/io/stream/ParallelStream.java     |   2 +-
 .../solr/client/solrj/io/stream/RandomStream.java  |   3 +-
 .../client/solrj/io/stream/ScoreNodesStream.java   |   3 +-
 .../solr/client/solrj/io/stream/SearchStream.java  |   4 +-
 .../solr/client/solrj/io/stream/SolrStream.java    |   5 +-
 .../solr/client/solrj/io/stream/SqlStream.java     |   2 +-
 .../solr/client/solrj/io/stream/StatsStream.java   |   7 +-
 .../client/solrj/io/stream/TextLogitStream.java    |   2 +-
 .../client/solrj/io/stream/TimeSeriesStream.java   |   4 +-
 .../solr/client/solrj/io/stream/TopicStream.java   |   6 +-
 .../io/stream/expr/StreamExpressionParser.java     |   6 +-
 .../apache/solr/common/EmptyEntityResolver.java    |  12 +-
 .../src/java/org/apache/solr/common/ParWork.java   |  36 ++---
 .../apache/solr/common/PerThreadExecService.java   |  22 +--
 .../solr/common/cloud/CollectionProperties.java    |   5 +-
 .../solr/common/cloud/ConnectionManager.java       |  23 +--
 .../solr/common/cloud/NodesSysPropsCacher.java     |   4 +-
 .../org/apache/solr/common/cloud/SolrZkClient.java |  68 ++++-----
 .../apache/solr/common/cloud/SolrZooKeeper.java    |  19 ++-
 .../apache/solr/common/cloud/ZkStateReader.java    |  71 ++++-----
 .../solr/common/cloud/rule/ImplicitSnitch.java     |   4 +-
 .../apache/solr/common/params/ConfigSetParams.java |   6 +-
 .../org/apache/solr/common/params/SolrParams.java  |   4 +-
 .../apache/solr/common/util/ContentStreamBase.java |   2 +-
 .../java/org/apache/solr/common/util/IOUtils.java  |   2 +-
 .../apache/solr/common/util/OrderedExecutor.java   |   9 +-
 .../org/apache/solr/common/util/TimeSource.java    |   2 +-
 .../java/org/apache/solr/common/util/Utils.java    |  71 ++++-----
 .../apache/solr/common/util/ValidatingJsonMap.java |   5 +-
 .../solr/client/solrj/LargeVolumeTestBase.java     |   2 +-
 .../apache/solr/BaseDistributedSearchTestCase.java |  17 +--
 .../java/org/apache/solr/SolrJettyTestBase.java    |  22 +--
 .../src/java/org/apache/solr/SolrTestCase.java     |  11 +-
 .../src/java/org/apache/solr/SolrTestCaseHS.java   |   2 +-
 .../src/java/org/apache/solr/SolrTestCaseJ4.java   | 105 ++++++-------
 .../analysis/StringMockSolrResourceLoader.java     |   4 +-
 .../solr/cloud/AbstractFullDistribZkTestBase.java  |  28 ++--
 .../java/org/apache/solr/cloud/ChaosMonkey.java    |  10 +-
 .../org/apache/solr/cloud/CloudInspectUtil.java    |  16 +-
 .../apache/solr/cloud/MiniSolrCloudCluster.java    |  21 +--
 .../org/apache/solr/cloud/SolrCloudTestCase.java   |   2 +-
 .../apache/solr/cloud/StoppableIndexingThread.java |   4 +-
 .../apache/solr/cloud/StoppableSearchThread.java   |   4 +-
 .../java/org/apache/solr/cloud/ZkTestServer.java   |  14 +-
 .../solr/core/AbstractBadConfigTestBase.java       |   8 +-
 .../java/org/apache/solr/util/ExternalPaths.java   |   6 +-
 .../java/org/apache/solr/util/RestTestBase.java    |  15 +-
 .../java/org/apache/solr/util/RestTestHarness.java |  22 +--
 .../java/org/apache/solr/util/SSLTestConfig.java   |   9 +-
 .../src/java/org/apache/solr/util/TestHarness.java |  22 +--
 265 files changed, 1986 insertions(+), 2221 deletions(-)
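
Every hunk below has the same shape: a catch block that previously called
ParWork.propegateInterrupt(...) now calls the correctly spelled
ParWork.propagateInterrupt(...), using the overloads visible in the changes
((Throwable), (Throwable, boolean) and (String, Throwable)). A minimal sketch
of a typical call site follows, assuming propagateInterrupt(Throwable) restores
the calling thread's interrupt status when the exception is (or wraps) an
InterruptedException; the class and method names here are illustrative only,
and the ParWork implementation itself is not part of this diff.

    import org.apache.solr.common.ParWork;
    import org.apache.solr.common.SolrException;

    public class IllustrativeTask {

      public void run() {
        try {
          doWork();
        } catch (Exception e) {
          // Assumed behaviour: if e is (or wraps) an InterruptedException,
          // re-set Thread.currentThread().interrupt() so the interrupt is not lost,
          // then let the caller surface the failure as a SolrException.
          ParWork.propagateInterrupt(e);
          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
        }
      }

      private void doWork() throws Exception {
        // placeholder for work that may be interrupted
      }
    }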

diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/AnalyticsShardRequestManager.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/AnalyticsShardRequestManager.java
index 12e7b2c..3bc02a1 100644
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/AnalyticsShardRequestManager.java
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/AnalyticsShardRequestManager.java
@@ -47,9 +47,7 @@ import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.solr.handler.AnalyticsHandler;
 import org.apache.solr.handler.component.AnalyticsComponent;
 import org.apache.solr.response.AnalyticsShardResponseWriter;
@@ -164,7 +162,7 @@ public class AnalyticsShardRequestManager {
         }
       }
     } catch (InterruptedException e1) {
-      ParWork.propegateInterrupt(e1);
+      ParWork.propagateInterrupt(e1);
       throw new RuntimeException(e1);
     } catch (ExecutionException e1) {
       throw new RuntimeException(e1);
diff --git a/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java
index 29ddd17..f7c558b 100644
--- a/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java
+++ b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java
@@ -28,7 +28,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
@@ -268,7 +267,7 @@ public class AnnotatedApi extends Api implements PermissionNameProvider {
         log.error("Error executing command ", ite);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, ite.getCause());
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Error executing command : ", e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
       }
diff --git a/solr/core/src/java/org/apache/solr/api/ApiBag.java b/solr/core/src/java/org/apache/solr/api/ApiBag.java
index 12b8db9..e67cf45 100644
--- a/solr/core/src/java/org/apache/solr/api/ApiBag.java
+++ b/solr/core/src/java/org/apache/solr/api/ApiBag.java
@@ -84,7 +84,7 @@ public class ApiBag {
     try {
       validateAndRegister(api, nameSubstitutes);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Unable to register plugin: {} with spec {} :", api.getClass().getName(), Utils.toJSONString(api.getSpec()), e);
       if (e instanceof RuntimeException) {
         throw (RuntimeException) e;
@@ -204,7 +204,7 @@ public class ApiBag {
       try {
         validators.put((String) cmd.getKey(), new JsonSchemaValidator((Map) cmd.getValue()));
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error in api spec", e);
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
index ac1d6dc..cd28b12 100644
--- a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
+++ b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
@@ -215,7 +215,7 @@ public class V2HttpCall extends HttpSolrCall {
     try {
       zkStateReader.aliasesManager.update();
     } catch (Exception e) {
-      ParWork.propegateInterrupt("Error trying to update state while resolving collection.", e);
+      ParWork.propagateInterrupt("Error trying to update state while resolving collection.", e);
       if (e instanceof KeeperException.SessionExpiredException) {
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
       }
@@ -342,7 +342,7 @@ public class V2HttpCall extends HttpSolrCall {
     try {
       api.call(this.solrReq, solrResp);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       solrResp.setException(e);
     }
   }
@@ -357,7 +357,7 @@ public class V2HttpCall extends HttpSolrCall {
       try {
         api.call(solrReq, rsp);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         rsp.setException(e);
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
index fc795e6..8a18402 100644
--- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
+++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
@@ -16,8 +16,6 @@
  */
 package org.apache.solr.client.solrj.embedded;
 
-import static org.apache.solr.common.params.CommonParams.PATH;
-
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -61,6 +59,8 @@ import org.apache.solr.response.ResultContext;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.servlet.SolrRequestParsers;
 
+import static org.apache.solr.common.params.CommonParams.PATH;
+
 /**
  * SolrClient that connects directly to a CoreContainer.
  *
@@ -179,7 +179,7 @@ public class EmbeddedSolrServer extends SolrClient {
       } catch (IOException | SolrException iox) {
         throw iox;
       } catch (Exception ex) {
-        ParWork.propegateInterrupt(ex);
+        ParWork.propagateInterrupt(ex);
         throw new SolrServerException(ex);
       }
     }
@@ -260,7 +260,7 @@ public class EmbeddedSolrServer extends SolrClient {
             }
           }
         } catch (Exception ex) {
-          ParWork.propegateInterrupt(ex);
+          ParWork.propagateInterrupt(ex);
           throw new RuntimeException(ex);
         }
       }
@@ -271,7 +271,7 @@ public class EmbeddedSolrServer extends SolrClient {
     } catch (IOException | SolrException iox) {
       throw iox;
     } catch (Exception ex) {
-      ParWork.propegateInterrupt(ex);
+      ParWork.propagateInterrupt(ex);
       throw new SolrServerException(ex);
     } finally {
       if (req != null) req.close();
diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
index b36419b..b3ebc0a 100644
--- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
+++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
@@ -16,11 +16,38 @@
  */
 package org.apache.solr.client.solrj.embedded;
 
+import javax.servlet.DispatcherType;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.Closeable;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.net.BindException;
+import java.net.URI;
+import java.util.EnumSet;
+import java.util.LinkedList;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+
 import org.apache.lucene.util.Constants;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.cloud.SocketProxy;
 import org.apache.solr.client.solrj.impl.Http2SolrClient;
-import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.cloud.ZkController;
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrException;
@@ -29,13 +56,11 @@ import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.ObjectReleaseTracker;
 import org.apache.solr.common.util.SolrQueuedThreadPool;
 import org.apache.solr.common.util.SolrScheduledExecutorScheduler;
-import org.apache.solr.common.util.TimeSource;
 import org.apache.solr.core.CloudConfig;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.NodeConfig;
 import org.apache.solr.servlet.SolrDispatchFilter;
 import org.apache.solr.servlet.SolrQoSFilter;
-import org.apache.solr.util.TimeOut;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.WatchedEvent;
 import org.apache.zookeeper.Watcher;
@@ -55,7 +80,6 @@ import org.eclipse.jetty.server.ServerConnector;
 import org.eclipse.jetty.server.SessionIdManager;
 import org.eclipse.jetty.server.SslConnectionFactory;
 import org.eclipse.jetty.server.handler.HandlerWrapper;
-import org.eclipse.jetty.server.handler.ShutdownHandler;
 import org.eclipse.jetty.server.handler.gzip.GzipHandler;
 import org.eclipse.jetty.server.session.HouseKeeper;
 import org.eclipse.jetty.server.session.SessionHandler;
@@ -65,41 +89,11 @@ import org.eclipse.jetty.servlet.ServletHolder;
 import org.eclipse.jetty.servlet.Source;
 import org.eclipse.jetty.util.component.LifeCycle;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
-import org.eclipse.jetty.util.thread.QueuedThreadPool;
 import org.eclipse.jetty.util.thread.Scheduler;
-import org.eclipse.jetty.util.thread.ShutdownThread;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.MDC;
 
-import javax.servlet.DispatcherType;
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.Closeable;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.net.BindException;
-import java.net.URI;
-import java.util.EnumSet;
-import java.util.LinkedList;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-
 /**
  * Run solr using jetty
  *
@@ -287,7 +281,7 @@ public class JettySolrRunner implements Closeable {
       try {
         proxy = new SocketProxy(0, config.sslConfig != null && config.sslConfig.isSSLMode());
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new RuntimeException(e);
       }
       setProxyPort(proxy.getListenPort());
@@ -609,14 +603,14 @@ public class JettySolrRunner implements Closeable {
               }
               log.info("Done waiting on latch");
             } catch (InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, e);
             }
           }
         } catch (KeeperException e) {
           throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, e);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, e);
         }
 
@@ -742,7 +736,7 @@ public class JettySolrRunner implements Closeable {
               reader.waitForLiveNodes(10, TimeUnit.SECONDS, (o, n) -> !n.contains(nodeName));
             }
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
           } catch (TimeoutException e) {
             log.error("Timeout waiting for live node");
           }
@@ -1062,7 +1056,7 @@ public class JettySolrRunner implements Closeable {
         } catch (KeeperException e) {
           SolrException.log(log, e);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/cloud/ActionThrottle.java b/solr/core/src/java/org/apache/solr/cloud/ActionThrottle.java
index 85c37a3..f2d5ead 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ActionThrottle.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ActionThrottle.java
@@ -17,7 +17,6 @@
 package org.apache.solr.cloud;
 
 import java.lang.invoke.MethodHandles;
-
 import java.util.concurrent.TimeUnit;
 
 import org.apache.solr.common.ParWork;
@@ -85,7 +84,7 @@ public class ActionThrottle {
       try {
         timeSource.sleep(sleep);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       }
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/cloud/CloudConfigSetService.java b/solr/core/src/java/org/apache/solr/cloud/CloudConfigSetService.java
index 3b8aa61..d7bb4a5 100644
--- a/solr/core/src/java/org/apache/solr/cloud/CloudConfigSetService.java
+++ b/solr/core/src/java/org/apache/solr/cloud/CloudConfigSetService.java
@@ -16,6 +16,8 @@
  */
 package org.apache.solr.cloud;
 
+import java.lang.invoke.MethodHandles;
+
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ZkConfigManager;
@@ -31,8 +33,6 @@ import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.lang.invoke.MethodHandles;
-
 /**
  * SolrCloud ConfigSetService impl.
  */
@@ -77,7 +77,7 @@ public class CloudConfigSetService extends ConfigSetService {
       log.warn("Unexpected exception when getting modification time of {}", zkPath, e);
       return null; // debatable; we'll see an error soon if there's a real problem
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
     }
     if (stat == null) { // not found
diff --git a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
index 5321b1d..d5361f8 100644
--- a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
+++ b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
@@ -144,7 +144,7 @@ public class CloudUtil {
       log.info("Error fetching key names");
       return Collections.EMPTY_MAP;
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR,"Unable to read crypto keys",e );
     } catch (KeeperException e) {
       throw new SolrException(ErrorCode.SERVER_ERROR,"Unable to read crypto keys",e );
@@ -176,7 +176,7 @@ public class CloudUtil {
         return predicate.matches(n, c);
       });
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new AssertionError(message + "\n" + "Live Nodes: " + liveNodesLastSeen.get() + "\nLast available state: " + state.get(), e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java b/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java
index 4fe5537..b938728 100644
--- a/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java
+++ b/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java
@@ -16,6 +16,15 @@
  */
 package org.apache.solr.cloud;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
 import org.apache.solr.cloud.ZkController.ContextKey;
 import org.apache.solr.common.AlreadyClosedException;
 import org.apache.solr.common.ParWork;
@@ -31,15 +40,6 @@ import org.apache.zookeeper.Watcher.Event.EventType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
 /**
  * Leader Election process. This class contains the logic by which a
  * leader is chosen. First call * {@link #setup(ElectionContext)} to ensure
@@ -338,14 +338,14 @@ public  class LeaderElector {
         try {
           zkClient.delete(myNode, -1);
         } catch (AlreadyClosedException | InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.info("Already shutting down");
           return;
         } catch (KeeperException.NoNodeException nne) {
           log.info("No znode found to delete at {}", myNode);
           // expected . don't do anything
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Exception canceling election", e);
         }
         return;
diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
index df227a1..b6698f9 100644
--- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
@@ -16,6 +16,21 @@
  */
 package org.apache.solr.cloud;
 
+import java.io.Closeable;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.atomic.LongAdder;
+import java.util.function.BiConsumer;
+
 import org.apache.lucene.util.Version;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
@@ -65,24 +80,6 @@ import org.apache.zookeeper.Watcher;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.solr.common.params.CommonParams.ID;
-import java.io.Closeable;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.atomic.LongAccumulator;
-import java.util.concurrent.atomic.LongAdder;
-import java.util.function.BiConsumer;
-
 /**
  * <p>Cluster leader. Responsible for processing state updates, node assignments, creating/deleting
  * collections, shards, replicas and setting various properties.</p>
@@ -251,7 +248,7 @@ public class Overseer implements SolrCloseable {
                 try {
                   processQueueItem(message, getZkStateReader().getClusterState(), zkStateWriter, false, null);
                 } catch (InterruptedException | AlreadyClosedException e) {
-                  ParWork.propegateInterrupt(e);
+                  ParWork.propagateInterrupt(e);
                   return;
                 } catch (KeeperException.SessionExpiredException e) {
                   log.warn("Solr cannot talk to ZK, exiting Overseer work queue loop", e);
@@ -266,7 +263,7 @@ public class Overseer implements SolrCloseable {
                       fallbackQueue.poll();
                     }
                   } catch (InterruptedException e1) {
-                    ParWork.propegateInterrupt(e);
+                    ParWork.propagateInterrupt(e);
                     return;
                   } catch (Exception e1) {
                     exp.addSuppressed(e1);
@@ -289,7 +286,7 @@ public class Overseer implements SolrCloseable {
               log.warn("Solr cannot talk to ZK, exiting Overseer work queue loop", e);
               return;
             } catch (InterruptedException | AlreadyClosedException e) {
-              ParWork.propegateInterrupt(e, true);
+              ParWork.propagateInterrupt(e, true);
               return;
             } catch (Exception e) {
               log.error("Unexpected error in Overseer state update loop", e);
@@ -312,7 +309,7 @@ public class Overseer implements SolrCloseable {
 //            }
             queue = new LinkedList<>(stateUpdateQueue.peekElements(1000, wait, (x) -> true));
           } catch (InterruptedException | AlreadyClosedException e) {
-            ParWork.propegateInterrupt(e, true);
+            ParWork.propagateInterrupt(e, true);
             return;
           } catch (KeeperException.SessionExpiredException e) {
             log.error("run()", e);
@@ -366,7 +363,7 @@ public class Overseer implements SolrCloseable {
             stateUpdateQueue.remove(processedNodes);
             processedNodes.clear();
           } catch (InterruptedException | AlreadyClosedException e) {
-            ParWork.propegateInterrupt(e, true);
+            ParWork.propagateInterrupt(e, true);
             return;
           } catch (KeeperException.SessionExpiredException e) {
             log.error("run()", e);
@@ -512,7 +509,7 @@ public class Overseer implements SolrCloseable {
               getSolrCloudManager().getDistribStateManager().makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName + "/terms/" + shardName, ZkStateReader.emptyJson, CreateMode.PERSISTENT, false);
 
             } catch (KeeperException | AlreadyExistsException | IOException | InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
             }
             return ret;
@@ -718,10 +715,10 @@ public class Overseer implements SolrCloseable {
       log.warn("ZooKeeper session expired");
       return;
     } catch (InterruptedException | AlreadyClosedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       return;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Unexpected error in Overseer state update loop", e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
     }
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerConfigSetMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/OverseerConfigSetMessageHandler.java
index 42a8d87..ab95e2a 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerConfigSetMessageHandler.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerConfigSetMessageHandler.java
@@ -16,6 +16,17 @@
  */
 package org.apache.solr.cloud;
 
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.lang.invoke.MethodHandles;
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
@@ -33,17 +44,6 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.lang.invoke.MethodHandles;
-import java.nio.charset.StandardCharsets;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Set;
-
 import static org.apache.solr.common.params.CommonParams.NAME;
 import static org.apache.solr.common.params.ConfigSetParams.ConfigSetAction.CREATE;
 import static org.apache.solr.common.util.Utils.toJSONString;
@@ -119,7 +119,7 @@ public class OverseerConfigSetMessageHandler implements OverseerMessageHandler {
     } catch (Exception e) {
       // interrupt not currently thrown here, but it could be - I
       // usually like to use a utility everywhere for this reason
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
 
       String configSetName = message.getStr(NAME);
 
@@ -326,7 +326,7 @@ public class OverseerConfigSetMessageHandler implements OverseerMessageHandler {
                   propertyData, CreateMode.PERSISTENT, null, false, true);
         }
       } catch (KeeperException | InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new IOException("Error writing new properties", e);
       }
     } catch (Exception e) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/OverseerElectionContext.java
index a607ef1..1ec1e23 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerElectionContext.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerElectionContext.java
@@ -120,7 +120,7 @@ final class OverseerElectionContext extends ShardLeaderElectionContextBase {
           try {
             super.cancelElection();
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             log.error("Exception closing Overseer", e);
           }
         });
@@ -129,7 +129,7 @@ final class OverseerElectionContext extends ShardLeaderElectionContextBase {
         try {
           overseer.doClose(fromCSUpdateThread);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("Exception closing Overseer", e);
         }
       });
@@ -147,7 +147,7 @@ final class OverseerElectionContext extends ShardLeaderElectionContextBase {
     try {
       cancelElection(fromCSUpdateThread);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Exception canceling election", e);
     }
     super.close();
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
index c38b63f..1878c1d 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
@@ -16,6 +16,18 @@
  */
 package org.apache.solr.cloud;
 
+import java.io.Closeable;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentSkipListMap;
+import java.util.concurrent.Future;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Predicate;
+
 import com.codahale.metrics.Timer;
 import com.google.common.collect.ImmutableSet;
 import org.apache.solr.cloud.OverseerTaskQueue.QueueEvent;
@@ -36,20 +48,6 @@ import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
 import static org.apache.solr.common.params.CommonParams.ID;
-import java.io.Closeable;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentSkipListMap;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.function.Predicate;
 
 /**
  * A generic processor run in the Overseer, used for handling items added
@@ -160,7 +158,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
       // async calls.
       SolrException.log(log, "", e);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       if (e instanceof KeeperException.SessionExpiredException) {
         return;
       }
@@ -180,7 +178,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
       try {
         prioritizer.prioritizeOverseerNodes(myId);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         if (e instanceof KeeperException.SessionExpiredException) {
           return;
         }
@@ -267,7 +265,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
             }
           } catch (Exception e) {
             if (e instanceof KeeperException.SessionExpiredException || e instanceof InterruptedException) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               log.error("ZooKeeper session has expired");
               return;
             }
@@ -280,7 +278,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
         }
 
       } catch (InterruptedException | AlreadyClosedException e) {
-        ParWork.propegateInterrupt(e, true);
+        ParWork.propagateInterrupt(e, true);
         return;
       } catch (KeeperException.SessionExpiredException e) {
         log.warn("Zookeeper expiration");
@@ -481,7 +479,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
         taskFutures.remove(this);
         success = true;
       } catch (InterruptedException | AlreadyClosedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         return;
       } catch (Exception e) {
         if (e instanceof KeeperException.SessionExpiredException) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java
index c83f397..d496d9d 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java
@@ -16,6 +16,17 @@
  */
 package org.apache.solr.cloud;
 
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.TreeSet;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.LongAdder;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.function.Predicate;
+
 import com.codahale.metrics.Timer;
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.cloud.SolrZkClient;
@@ -30,18 +41,6 @@ import org.apache.zookeeper.Watcher;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.TreeSet;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.LongAdder;
-import java.util.concurrent.locks.Condition;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
-import java.util.function.Predicate;
-
 /**
  * A {@link ZkDistributedQueue} augmented with helper methods specific to the overseer task queues.
  * Methods specific to this subclass ignore superclass internal state and hit ZK directly.
@@ -67,7 +66,7 @@ public class OverseerTaskQueue extends ZkDistributedQueue {
       try {
         Thread.sleep(250);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Interrupted while waiting for overseer queue to drain before shutdown!");
       }
     }
@@ -168,7 +167,7 @@ public class OverseerTaskQueue extends ZkDistributedQueue {
         try {
           lock.lockInterruptibly();
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           return;
         }
         try {
@@ -193,7 +192,7 @@ public class OverseerTaskQueue extends ZkDistributedQueue {
           try {
             eventReceived.await(timeoutMs, TimeUnit.MILLISECONDS);
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw e;
           }
         }
diff --git a/solr/core/src/java/org/apache/solr/cloud/RecoveringCoreTermWatcher.java b/solr/core/src/java/org/apache/solr/cloud/RecoveringCoreTermWatcher.java
index 2f22b6a..432e0e2 100644
--- a/solr/core/src/java/org/apache/solr/cloud/RecoveringCoreTermWatcher.java
+++ b/solr/core/src/java/org/apache/solr/cloud/RecoveringCoreTermWatcher.java
@@ -63,7 +63,7 @@ public class RecoveringCoreTermWatcher implements ZkShardTerms.CoreTermWatcher {
         solrCore.getUpdateHandler().getSolrCoreState().doRecovery(solrCore.getCoreContainer(), solrCore.getCoreDescriptor());
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       if (log.isInfoEnabled()) {
         log.info("Failed to watch term of core {}", coreDescriptor.getName(), e);
       }
diff --git a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
index 021db42..6ccc957 100644
--- a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
+++ b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
@@ -16,6 +16,18 @@
  */
 package org.apache.solr.cloud;
 
+import java.io.Closeable;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicInteger;
+
 import org.apache.http.client.methods.HttpUriRequest;
 import org.apache.lucene.index.IndexCommit;
 import org.apache.lucene.search.MatchAllDocsQuery;
@@ -37,7 +49,6 @@ import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.cloud.ZooKeeperException;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.NamedList;
@@ -56,7 +67,6 @@ import org.apache.solr.update.CommitUpdateCommand;
 import org.apache.solr.update.PeerSyncWithLeader;
 import org.apache.solr.update.UpdateLog;
 import org.apache.solr.update.UpdateLog.RecoveryInfo;
-import org.apache.solr.update.UpdateShardHandlerConfig;
 import org.apache.solr.update.processor.DistributedUpdateProcessor;
 import org.apache.solr.util.RefCounted;
 import org.apache.solr.util.SolrPluginUtils;
@@ -64,18 +74,6 @@ import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.Closeable;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicInteger;
-
 /**
  * This class may change in future and customisations are not supported between versions in terms of API or back compat
  * behaviour.
@@ -314,7 +312,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
           searchHolder.decref();
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.debug("Error in solrcloud_debug block", e);
       }
     }
@@ -353,12 +351,12 @@ public class RecoveryStrategy implements Runnable, Closeable {
         try {
           doRecovery(core);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e, true);
+          ParWork.propagateInterrupt(e, true);
           return;
         } catch (AlreadyClosedException e) {
           return;
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("", e);
           return;
         }
@@ -445,13 +443,13 @@ public class RecoveryStrategy implements Runnable, Closeable {
           log.info("Replication Recovery was successful.");
           successfulRecovery = true;
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           SolrException.log(log, "Error while trying to recover", e);
           return;
         }
 
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         SolrException.log(log, "Error while trying to recover. core=" + coreName, e);
       } finally {
         if (successfulRecovery) {
@@ -461,7 +459,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
           try {
             zkController.publish(this.coreDescriptor, Replica.State.ACTIVE);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             log.error("Could not publish as ACTIVE after succesful recovery", e);
             successfulRecovery = false;
           }
@@ -494,16 +492,16 @@ public class RecoveryStrategy implements Runnable, Closeable {
             try {
               recoveryFailed(core, zkController, baseUrl, coreZkNodeName, this.coreDescriptor);
             } catch (InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               return;
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               SolrException.log(log, "Could not publish that recovery failed", e);
             }
             break;
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           SolrException.log(log, "An error has occurred during recovery", e);
         }
 
@@ -525,7 +523,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
             Thread.sleep(startingRecoveryDelayMilliSeconds);
           }
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Recovery was interrupted.", e);
           close = true;
         }
@@ -556,7 +554,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
     try (UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates()) {
       recentVersions = recentUpdates.getVersions(ulog.getNumRecordsToKeep());
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       SolrException.log(log, "Corrupt tlog - ignoring.", e);
       recentVersions = new ArrayList<>(0);
     }
@@ -589,7 +587,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
           }
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);;
+        ParWork.propagateInterrupt(e);
         SolrException.log(log, "Error getting recent versions.", e);
         recentVersions = new ArrayList<>(0);
       }
@@ -608,7 +606,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
           firstTime = false; // skip peersync
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         SolrException.log(log, "Error trying to get ulog starting operation.", e);
         firstTime = false; // skip peersync
       }
@@ -676,7 +674,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
         try {
           Thread.sleep(waitForUpdatesWithStaleStatePauseMilliSeconds);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(ErrorCode.SERVER_ERROR, e);
         }
 
@@ -742,7 +740,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
           log.info("Replication Recovery was successful.");
           successfulRecovery = true;
         } catch (InterruptedException | AlreadyClosedException e) {
-          ParWork.propegateInterrupt(e, true);
+          ParWork.propagateInterrupt(e, true);
           return;
         } catch (Exception e) {
           SolrException.log(log, "Error while trying to recover", e);
@@ -762,7 +760,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
             }
             zkController.publish(this.coreDescriptor, Replica.State.ACTIVE);
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             close = true;
           } catch (Exception e) {
             log.error("Could not publish as ACTIVE after succesful recovery", e);
@@ -794,7 +792,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
             try {
               recoveryFailed(core, zkController, baseUrl, coreZkNodeName, this.coreDescriptor);
             } catch(InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               return;
             }  catch
             (Exception e) {
@@ -821,7 +819,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
             Thread.sleep(startingRecoveryDelayMilliSeconds);
           }
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e, true);
+          ParWork.propagateInterrupt(e, true);
           return;
         }
       }
@@ -886,7 +884,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
         // let the recovery throttle handle pauses
         log.error("Failed to connect leader {} on recovery, try again", leaderReplica.getBaseUrl());
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         if (e.getCause() instanceof IOException) {
           log.error("Failed to connect leader {} on recovery, try again", leaderReplica.getBaseUrl());
         } else {
@@ -922,7 +920,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
       try {
         report = future.get(10, TimeUnit.MINUTES); // nocommit - how long? make configurable too
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new InterruptedException();
       } catch (TimeoutException e) {
         throw new SolrException(ErrorCode.SERVER_ERROR, e);
@@ -957,7 +955,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
         searchHolder.decref();
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.debug("Error in solrcloud_debug block", e);
     }
   }
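
The hunks above swap ParWork.propegateInterrupt(...) for propagateInterrupt(...) inside the catch blocks of RecoveryStrategy. As a minimal, hypothetical sketch of the interrupt-propagation idiom such a helper conventionally encapsulates (the class and method below are invented for illustration; the real ParWork.propagateInterrupt may differ in signature and behaviour):

    // Illustrative only; not the actual ParWork implementation.
    final class InterruptUtil {
      private InterruptUtil() {}

      /** Re-set the calling thread's interrupt flag when the caught exception is an interrupt. */
      static void propagateInterrupt(Throwable t) {
        if (t instanceof InterruptedException) {
          Thread.currentThread().interrupt(); // preserve the interrupt for callers further up the stack
        }
      }
    }

Swallowing an InterruptedException without restoring the flag would leave the recovery thread unable to notice that shutdown was requested, which is why the catch blocks above call the helper before logging or returning.
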
diff --git a/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
index 8003ee1..44308df 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
@@ -122,7 +122,7 @@ public class ReplicateFromLeader implements Closeable {
       if (commitVersion == null) return null;
       else return commitVersion;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Cannot get commit command version from index commit point ",e);
       return null;
     }
@@ -148,7 +148,7 @@ public class ReplicateFromLeader implements Closeable {
     try {
       stopReplication();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
     }
     ObjectReleaseTracker.release(this);
   }
diff --git a/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContext.java
index 3f5587e..6fa82f2 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContext.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContext.java
@@ -20,8 +20,6 @@ import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.solr.cloud.overseer.OverseerAction;
@@ -210,7 +208,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
           result = syncStrategy.sync(zkController, core, leaderProps, weAreReplacement);
           success = result.isSuccess();
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(ErrorCode.SERVER_ERROR, e);
         }
         if (isClosed()) {
@@ -255,7 +253,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
               searchHolder.decref();
             }
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new SolrException(ErrorCode.SERVER_ERROR, e);
           }
         }
@@ -325,11 +323,11 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
           log.info("I am the new leader: " + ZkCoreNodeProps.getCoreUrl(leaderProps) + " " + shardId);
 
         } catch (AlreadyClosedException | InterruptedException e) {
-          ParWork.propegateInterrupt("Already closed or interrupted, bailing..", e);
+          ParWork.propagateInterrupt("Already closed or interrupted, bailing..", e);
           return;
         } catch (Exception e) {
           SolrException.log(log, "There was a problem trying to register as the leader", e);
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           if (isClosed()) {
             return;
           }
@@ -352,7 +350,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
                 }
                 rejoinLeaderElection(core);
               } catch (Exception exc) {
-                ParWork.propegateInterrupt(e);
+                ParWork.propagateInterrupt(e);
                 throw new SolrException(ErrorCode.SERVER_ERROR, e);
               }
             }
diff --git a/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContextBase.java b/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContextBase.java
index 964bdd3..443aa05 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContextBase.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContextBase.java
@@ -20,11 +20,9 @@ package org.apache.solr.cloud;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.nio.file.Paths;
+import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.ArrayList;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.solr.common.AlreadyClosedException;
 import org.apache.solr.common.ParWork;
@@ -62,7 +60,7 @@ class ShardLeaderElectionContextBase extends ElectionContext {
     try {
       super.close();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Exception canceling election", e);
     }
   }
@@ -117,7 +115,7 @@ class ShardLeaderElectionContextBase extends ElectionContext {
             }
 
           } catch (InterruptedException | AlreadyClosedException e) {
-            ParWork.propegateInterrupt(e, true);
+            ParWork.propagateInterrupt(e, true);
             return;
           } catch (Exception e) {
             throw new SolrException(ErrorCode.SERVER_ERROR, "Exception canceling election", e);
@@ -127,7 +125,7 @@ class ShardLeaderElectionContextBase extends ElectionContext {
         }
       } catch (Exception e) {
         if (e instanceof InterruptedException) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
         Stat stat = new Stat();
         zkClient.getData(Paths.get(leaderPath).getParent().toString(), null, stat);
@@ -176,7 +174,7 @@ class ShardLeaderElectionContextBase extends ElectionContext {
       // assert leaderZkNodeParentVersion != null;
 
     } catch (Throwable t) {
-      ParWork.propegateInterrupt(t);
+      ParWork.propagateInterrupt(t);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Could not register as the leader because creating the ephemeral registration node in ZooKeeper failed: " + errors, t);
     }
 
diff --git a/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java b/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
index 6a322aa..21d3c43 100644
--- a/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
@@ -16,16 +16,6 @@
  */
 package org.apache.solr.cloud;
 
-import org.apache.solr.common.ParWork;
-import org.apache.solr.common.SolrException;
-import org.apache.zookeeper.server.ServerConfig;
-import org.apache.zookeeper.server.ZooKeeperServerMain;
-import org.apache.zookeeper.server.quorum.QuorumPeer;
-import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
-import org.apache.zookeeper.server.quorum.QuorumPeerMain;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.Closeable;
 import java.io.File;
 import java.io.FileInputStream;
@@ -40,6 +30,16 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.regex.Pattern;
 
+import org.apache.solr.common.ParWork;
+import org.apache.solr.common.SolrException;
+import org.apache.zookeeper.server.ServerConfig;
+import org.apache.zookeeper.server.ZooKeeperServerMain;
+import org.apache.zookeeper.server.quorum.QuorumPeer;
+import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
+import org.apache.zookeeper.server.quorum.QuorumPeerMain;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 
 public class SolrZkServer implements Closeable {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -124,7 +124,7 @@ public class SolrZkServer implements Closeable {
           }
           log.info("ZooKeeper Server exited.");
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("ZooKeeper Server ERROR", e);
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
         }
diff --git a/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java b/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java
index f23bf7a..17f5fd2 100644
--- a/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java
+++ b/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java
@@ -16,6 +16,11 @@
  */
 package org.apache.solr.cloud;
 
+import java.io.Closeable;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
@@ -35,10 +40,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.common.params.CommonParams.DISTRIB;
-import java.io.Closeable;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.List;
 
 public class SyncStrategy implements Closeable {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -121,7 +122,7 @@ public class SyncStrategy implements Closeable {
           shardId, peerSyncOnlyWithActive);
       success = result.isSuccess();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       SolrException.log(log, "Sync Failed", e);
     }
     try {
@@ -141,7 +142,7 @@ public class SyncStrategy implements Closeable {
       }
       
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       SolrException.log(log, "Sync Failed", e);
     }
     
@@ -211,7 +212,7 @@ public class SyncStrategy implements Closeable {
         requestSync(node.getBaseUrl(), node.getCoreUrl(), zkLeader.getCoreUrl(), node.getCoreName(), nUpdates);
         
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         SolrException.log(log, "Error syncing replica to leader", e);
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index 6be434d..93c0dab 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -16,6 +16,43 @@
  */
 package org.apache.solr.cloud;
 
+import java.io.Closeable;
+import java.io.File;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.lang.invoke.MethodHandles;
+import java.net.HttpURLConnection;
+import java.net.InetAddress;
+import java.net.MalformedURLException;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.net.URL;
+import java.net.URLEncoder;
+import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Supplier;
+
 import com.google.common.base.Strings;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.solr.client.solrj.cloud.DistributedLock;
@@ -29,28 +66,8 @@ import org.apache.solr.common.AlreadyClosedException;
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.cloud.BeforeReconnect;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.ConnectionManager;
-import org.apache.solr.common.cloud.DefaultZkACLProvider;
-import org.apache.solr.common.cloud.DefaultZkCredentialsProvider;
-import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.DocCollectionWatcher;
-import org.apache.solr.common.cloud.LiveNodesListener;
-import org.apache.solr.common.cloud.NodesSysPropsCacher;
-import org.apache.solr.common.cloud.OnReconnect;
-import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.*;
 import org.apache.solr.common.cloud.Replica.Type;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.ZkACLProvider;
-import org.apache.solr.common.cloud.ZkConfigManager;
-import org.apache.solr.common.cloud.ZkCoreNodeProps;
-import org.apache.solr.common.cloud.ZkCredentialsProvider;
-import org.apache.solr.common.cloud.ZkMaintenanceUtils;
-import org.apache.solr.common.cloud.ZkNodeProps;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.cloud.ZooKeeperException;
 import org.apache.solr.common.params.CollectionParams;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.CoreAdminParams;
@@ -98,42 +115,6 @@ import static org.apache.solr.common.cloud.ZkStateReader.NODE_NAME_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.REJOIN_AT_HEAD_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
-import java.io.Closeable;
-import java.io.File;
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.lang.invoke.MethodHandles;
-import java.net.HttpURLConnection;
-import java.net.InetAddress;
-import java.net.MalformedURLException;
-import java.net.NetworkInterface;
-import java.net.SocketException;
-import java.net.URL;
-import java.net.URLEncoder;
-import java.net.UnknownHostException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.function.Supplier;
 
 /**
  * Handle ZooKeeper interactions.
@@ -371,7 +352,7 @@ public class ZkController implements Closeable {
         zkACLProvider = new DefaultZkACLProvider();
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Exception during ZkController init", e);
       throw e;
     }
@@ -389,7 +370,7 @@ public class ZkController implements Closeable {
       try {
         context.cancelElection();
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(ErrorCode.SERVER_ERROR, e);
       } catch (KeeperException e) {
         throw new SolrException(ErrorCode.SERVER_ERROR, e);
@@ -490,7 +471,7 @@ public class ZkController implements Closeable {
                     parWork.collect(new RegisterCoreAsync(descriptor, true, true));
 
                   } catch (Exception e) {
-                    ParWork.propegateInterrupt(e);
+                    ParWork.propagateInterrupt(e);
                     SolrException.log(log, "Error registering SolrCore", e);
                   }
                 }
@@ -512,7 +493,7 @@ public class ZkController implements Closeable {
               }
             }
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new ZooKeeperException(
                     SolrException.ErrorCode.SERVER_ERROR, "", e);
           } catch (SessionExpiredException e) {
@@ -590,7 +571,7 @@ public class ZkController implements Closeable {
             log.info("Publish this node as DOWN...");
             publishNodeAsDown(getNodeName());
           } catch (Exception e) {
-            ParWork.propegateInterrupt("Error publishing nodes as down. Continuing to close CoreContainer", e);
+            ParWork.propagateInterrupt("Error publishing nodes as down. Continuing to close CoreContainer", e);
           }
           return "PublishDown";
 
@@ -600,7 +581,7 @@ public class ZkController implements Closeable {
           try {
             removeEphemeralLiveNode();
           } catch (Exception e) {
-            ParWork.propegateInterrupt("Error Removing ephemeral live node. Continuing to close CoreContainer", e);
+            ParWork.propagateInterrupt("Error Removing ephemeral live node. Continuing to close CoreContainer", e);
           }
           return "RemoveEphemNode";
 
@@ -686,7 +667,7 @@ public class ZkController implements Closeable {
           props.put(CoreAdminParams.NODE, getNodeName());
           getOverseerCollectionQueue().offer(Utils.toJSON(new ZkNodeProps(props)));
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           // Exceptions are not bubbled up. giveupLeadership is best effort, and is only called in case of some other
           // unrecoverable error happened
           log.error("Met exception on give up leadership for {}", key, e);
@@ -781,7 +762,7 @@ public class ZkController implements Closeable {
             }
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           SolrException.log(log,
               "Error while looking for a better host name than 127.0.0.1", e);
         }
@@ -985,7 +966,7 @@ public class ZkController implements Closeable {
       } catch (KeeperException e) {
         throw new SolrException(ErrorCode.SERVER_ERROR, e);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(ErrorCode.SERVER_ERROR, e);
       }
       try {
@@ -1017,7 +998,7 @@ public class ZkController implements Closeable {
             try {
               createClusterZkNodes(zkClient);
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               log.error("Failed creating initial zk layout", e);
               throw new SolrException(ErrorCode.SERVER_ERROR, e);
             }
@@ -1119,7 +1100,7 @@ public class ZkController implements Closeable {
             } catch (KeeperException e) {
               throw new SolrException(ErrorCode.SERVER_ERROR, e);
             } catch (InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               throw new SolrException(ErrorCode.SERVER_ERROR, e);
             } catch (IOException e) {
               throw new SolrException(ErrorCode.SERVER_ERROR, e);
@@ -1136,7 +1117,7 @@ public class ZkController implements Closeable {
                 publishAndWaitForDownStates();
               }
             } catch (InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               throw new SolrException(ErrorCode.SERVER_ERROR, e);
             } catch (KeeperException e) {
               throw new SolrException(ErrorCode.SERVER_ERROR, e);
@@ -1148,7 +1129,7 @@ public class ZkController implements Closeable {
 
         //  publishAndWaitForDownStates();
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
                 "", e);
       } catch (KeeperException e) {
@@ -1192,7 +1173,7 @@ public class ZkController implements Closeable {
           try {
             createNodes = zkStateReader.getAutoScalingConfig().hasTriggerForEvents(TriggerEventType.NODELOST);
           } catch (KeeperException | InterruptedException e1) {
-            ParWork.propegateInterrupt(e1);
+            ParWork.propagateInterrupt(e1);
             log.warn("Unable to read autoscaling.json", e1);
           }
           if (createNodes) {
@@ -1205,7 +1186,7 @@ public class ZkController implements Closeable {
               } catch (KeeperException.NodeExistsException e) {
                 // someone else already created this node - ignore
               } catch (KeeperException | InterruptedException e1) {
-                ParWork.propegateInterrupt(e1);
+                ParWork.propagateInterrupt(e1);
                 log.warn("Unable to register nodeLost path for {}", n, e1);
               }
             }
@@ -1251,7 +1232,7 @@ public class ZkController implements Closeable {
                 log.error("Exception on proper shutdown", e);
                 return;
               } catch (InterruptedException e) {
-                ParWork.propegateInterrupt(e);
+                ParWork.propagateInterrupt(e);
                 return;
               }
             }
@@ -1261,7 +1242,7 @@ public class ZkController implements Closeable {
         log.error("Time out waiting to see solr live nodes go down " + children.size());
         return;
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         return;
       }
 
@@ -1270,7 +1251,7 @@ public class ZkController implements Closeable {
         try {
           success = latch.await(10, TimeUnit.SECONDS);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           return;
         }
         if (!success) {
@@ -1415,7 +1396,7 @@ public class ZkController implements Closeable {
         zkClient.getSolrZooKeeper().create(nodePath, null, zkClient.getZkACLProvider().getACLsToAdd(nodePath), CreateMode.EPHEMERAL);
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, e);
     }
   }
@@ -1527,7 +1508,7 @@ public class ZkController implements Closeable {
           startReplicationFromLeader(coreName, false);
         }
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         return null;
       } catch (KeeperException | IOException e) {
         throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "", e);
@@ -1649,7 +1630,7 @@ public class ZkController implements Closeable {
       try {
         prev.close();
       } catch (Exception e) {
-        ParWork.propegateInterrupt("Error closing previous replication attempt", e);
+        ParWork.propagateInterrupt("Error closing previous replication attempt", e);
       }
       if (isClosed()) throw new AlreadyClosedException();
       replicateFromLeader.startReplication(switchTransactionLog);
@@ -1684,7 +1665,7 @@ public class ZkController implements Closeable {
       zkStateReader.waitForState(collection, timeoutms * 2, TimeUnit.MILLISECONDS, (n, c) -> checkLeaderUrl(cloudDesc, leaderUrl, collection, shardId, leaderConflictResolveWait));
 
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Error getting leader from zk", e);
     }
     return leaderUrl;
@@ -1699,7 +1680,7 @@ public class ZkController implements Closeable {
 
       // leaderUrl = getLeaderProps(collection, cloudDesc.getShardId(), timeoutms).getCoreUrl();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, e);
     }
     return clusterStateLeaderUrl != null;
@@ -2086,7 +2067,7 @@ public class ZkController implements Closeable {
           return false;
         });
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(ErrorCode.SERVER_ERROR, "Could not get shard id for core: " + cd.getName());
       }
     } catch (TimeoutException e1) {
@@ -2152,13 +2133,13 @@ public class ZkController implements Closeable {
       log.error("", e);
       throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "", e);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "", e);
     } catch (NotInClusterStateException e) {
       // make the stack trace less verbose
       throw e;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "", e);
     }
 
@@ -2341,7 +2322,7 @@ public class ZkController implements Closeable {
     try {
       return asyncIdsMap.putIfAbsent(asyncId, EMPTY_BYTE_ARRAY);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new RuntimeException(e);
     }
   }
@@ -2356,7 +2337,7 @@ public class ZkController implements Closeable {
     try {
       return asyncIdsMap.remove(asyncId);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new RuntimeException(e);
     }
   }
@@ -2424,7 +2405,7 @@ public class ZkController implements Closeable {
             } catch (NoNodeException e) {
               //no problem
             } catch (InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               return;
             } catch (Exception e) {
               log.warn("Old election node exists , could not be removed ", e);
@@ -2440,7 +2421,7 @@ public class ZkController implements Closeable {
                zkClient, overseer), joinAtHead);
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to rejoin election", e);
     }
 
@@ -2477,7 +2458,7 @@ public class ZkController implements Closeable {
       if (prevContext != null) prevContext.close();
       elect.retryElection(context, params.getBool(REJOIN_AT_HEAD_PROP, false));
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to rejoin election", e);
     } finally {
       MDCLoggingContext.clear();
@@ -2502,7 +2483,7 @@ public class ZkController implements Closeable {
     } catch (NoNodeException nne) {
       return;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("could not read the overseer designate ", e);
     }
   }
@@ -2586,7 +2567,7 @@ public class ZkController implements Closeable {
                 log.debug("failed to set data version in zk is {} and expected version is {} ", stat.getVersion(), znodeVersion);
               }
             } catch (Exception e1) {
-              ParWork.propegateInterrupt(e1);
+              ParWork.propagateInterrupt(e1);
               log.warn("could not get stat");
             }
 
@@ -2604,7 +2585,7 @@ public class ZkController implements Closeable {
         Stat stat = zkClient.exists(resourceLocation, null);
         v = stat.getVersion();
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error(e.getMessage());
 
       }
@@ -2615,7 +2596,7 @@ public class ZkController implements Closeable {
     } catch (ResourceModifiedInZkException e) {
       throw e;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       final String msg = "Error persisting resource at " + resourceLocation;
       log.error(msg, e);
       throw new SolrException(ErrorCode.SERVER_ERROR, msg, e);
@@ -2628,7 +2609,7 @@ public class ZkController implements Closeable {
     try {
       zkClient.setData(zkLoader.getConfigSetZkPath(), new byte[]{0}, true);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       final String msg = "Error 'touching' conf location " + zkLoader.getConfigSetZkPath();
       log.error(msg, e);
       throw new SolrException(ErrorCode.SERVER_ERROR, msg, e);
@@ -2832,7 +2813,7 @@ public class ZkController implements Closeable {
             log.warn("Failed to unregister core:{}", coreName, e);
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Failed to unregister core:{}", coreName, e);
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java b/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java
index 6a14706..0755f56 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java
@@ -16,6 +16,22 @@
  */
 package org.apache.solr.cloud;
 
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.TreeSet;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.LongAdder;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.function.Predicate;
+
 import com.codahale.metrics.Timer;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
@@ -37,22 +53,6 @@ import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.TreeSet;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.LongAdder;
-import java.util.concurrent.locks.Condition;
-import java.util.concurrent.locks.ReentrantLock;
-import java.util.function.Predicate;
-
 /**
  * <p>A ZK-based distributed queue. Optimized for single-consumer,
  * multiple-producer: if there are multiple consumers on the same ZK queue,
@@ -154,7 +154,7 @@ public class ZkDistributedQueue implements DistributedQueue {
       } catch (KeeperException e) {
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
       }
     }finally {
@@ -470,7 +470,7 @@ public class ZkDistributedQueue implements DistributedQueue {
           try {
             changed.await(500, TimeUnit.MILLISECONDS);
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new AlreadyClosedException();
           }
           if (timeout.hasTimedOut()) {
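
The class javadoc in the hunk above describes ZkDistributedQueue as a ZK-backed queue optimized for a single consumer with multiple producers. A hedged usage sketch follows; the two-argument constructor, the byte[]-based offer/poll methods, the queue path and the QueueSketch/roundTrip names are assumptions made for illustration, not a verified API of this branch:

    import java.nio.charset.StandardCharsets;

    import org.apache.solr.cloud.ZkDistributedQueue;
    import org.apache.solr.common.cloud.SolrZkClient;

    class QueueSketch {
      // Producers on any node enqueue opaque payloads; the single consumer drains them in FIFO order.
      static void roundTrip(SolrZkClient zkClient) throws Exception {
        ZkDistributedQueue queue = new ZkDistributedQueue(zkClient, "/example/queue"); // path is illustrative
        queue.offer("payload".getBytes(StandardCharsets.UTF_8)); // producer side
        byte[] head = queue.poll();                              // consumer side; null when empty
      }
    }
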
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkShardTerms.java b/solr/core/src/java/org/apache/solr/cloud/ZkShardTerms.java
index 0b5fe66..2d2bc6b 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkShardTerms.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkShardTerms.java
@@ -17,6 +17,14 @@
 
 package org.apache.solr.cloud;
 
+import java.lang.invoke.MethodHandles;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
+
 import org.apache.solr.client.solrj.cloud.ShardTerms;
 import org.apache.solr.common.AlreadyClosedException;
 import org.apache.solr.common.ParWork;
@@ -33,16 +41,6 @@ import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.lang.invoke.MethodHandles;
-import java.sql.Connection;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicReference;
-
 /**
  * Class used to interact with a ZK term node.
  * Each ZK term node relates to a shard of a collection and has this format (in json)
@@ -321,7 +319,7 @@ public class ZkShardTerms implements AutoCloseable{
     } catch (KeeperException.NoNodeException e) {
       throw e;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error while saving shard term for collection: " + collection, e);
     }
     return false;
@@ -339,7 +337,7 @@ public class ZkShardTerms implements AutoCloseable{
     } catch (KeeperException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error updating shard term for collection: " + collection, e);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error updating shard term for collection: " + collection, e);
     }
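
The javadoc shown in the hunk above notes that each ZK term node corresponds to one shard and stores a small JSON document. As an illustrative, non-authoritative sketch of the kind of mapping ZkShardTerms manages (coreNodeName to term, with the highest term marking the most up-to-date replicas; the class, field and node names below are invented):

    import java.util.Map;

    class ShardTermsSketch {
      // Hypothetical snapshot of one shard's terms node: coreNodeName -> term.
      static final Map<String, Long> EXAMPLE_TERMS = Map.of(
          "core_node1", 1L,  // leader; term was bumped after an update failed elsewhere
          "core_node2", 1L,  // in sync with the leader
          "core_node3", 0L); // behind; must recover before it can become leader
    }
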
 
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java b/solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java
index 4f6f1bf..c7b20fb 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkSolrResourceLoader.java
@@ -16,6 +16,12 @@
  */
 package org.apache.solr.cloud;
 
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.invoke.MethodHandles;
+import java.nio.file.Path;
+
 import org.apache.lucene.analysis.util.ResourceLoader;
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrException;
@@ -28,12 +34,6 @@ import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.lang.invoke.MethodHandles;
-import java.nio.file.Path;
-
 /**
  * ResourceLoader that works with ZooKeeper.
  *
@@ -83,7 +83,7 @@ public class ZkSolrResourceLoader extends SolrResourceLoader implements Resource
       }
       return new ZkByteArrayInputStream(bytes, stat);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Interrupted while opening " + file, e);
     } catch (KeeperException.NoNodeException e) {
       throw new SolrResourceNotFoundException("Can't find resource '" + resource
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/AddReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/AddReplicaCmd.java
index 4e04c5c..cc0b540 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/AddReplicaCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/AddReplicaCmd.java
@@ -17,21 +17,6 @@
 
 package org.apache.solr.cloud.api.collections;
 
-import static org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler.SKIP_CREATE_REPLICA_IN_CLUSTER_STATE;
-import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.NRT_REPLICAS;
-import static org.apache.solr.common.cloud.ZkStateReader.PULL_REPLICAS;
-import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.TLOG_REPLICAS;
-import static org.apache.solr.common.params.CollectionAdminParams.COLL_CONF;
-import static org.apache.solr.common.params.CollectionAdminParams.FOLLOW_ALIASES;
-import static org.apache.solr.common.params.CollectionAdminParams.WITH_COLLECTION;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
-import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
-import static org.apache.solr.common.params.CommonAdminParams.TIMEOUT;
-import static org.apache.solr.common.params.CommonAdminParams.WAIT_FOR_FINAL_STATE;
-
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
@@ -51,11 +36,9 @@ import java.util.stream.Collectors;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.PolicyHelper;
-import org.apache.solr.cloud.ActiveReplicaWatcher;
 import org.apache.solr.cloud.Overseer;
 import org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler.ShardRequestTracker;
 import org.apache.solr.common.ParWork;
-import org.apache.solr.common.SolrCloseableLatch;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
@@ -76,6 +59,21 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler.SKIP_CREATE_REPLICA_IN_CLUSTER_STATE;
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.NRT_REPLICAS;
+import static org.apache.solr.common.cloud.ZkStateReader.PULL_REPLICAS;
+import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.TLOG_REPLICAS;
+import static org.apache.solr.common.params.CollectionAdminParams.COLL_CONF;
+import static org.apache.solr.common.params.CollectionAdminParams.FOLLOW_ALIASES;
+import static org.apache.solr.common.params.CollectionAdminParams.WITH_COLLECTION;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+import static org.apache.solr.common.params.CommonAdminParams.TIMEOUT;
+import static org.apache.solr.common.params.CommonAdminParams.WAIT_FOR_FINAL_STATE;
+
 public class AddReplicaCmd implements OverseerCollectionMessageHandler.Cmd {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
@@ -203,7 +201,7 @@ public class AddReplicaCmd implements OverseerCollectionMessageHandler.Cmd {
       try {
         shardRequestTracker.processResponses(results, shardHandler, true, "ADDREPLICA failed to create replica");
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         return;
       }
 
@@ -345,7 +343,7 @@ public class AddReplicaCmd implements OverseerCollectionMessageHandler.Cmd {
         try {
           ocmh.overseer.offerStateUpdate(Utils.toJSON(props));
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Exception updating Overseer state queue", e);
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java
index 840b6b1..a3d0017 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java
@@ -16,6 +16,25 @@
  */
 package org.apache.solr.cloud.api.collections;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+
 import com.google.common.collect.ImmutableMap;
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
@@ -41,24 +60,6 @@ import org.slf4j.LoggerFactory;
 import static org.apache.solr.client.solrj.cloud.autoscaling.Policy.POLICY;
 import static org.apache.solr.common.cloud.DocCollection.SNITCH;
 import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.Random;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.stream.Collectors;
 
 // nocommit - this needs work, but lets not hit zk and other nodes if we dont need for base Assign
 public class Assign {
@@ -317,7 +318,7 @@ public class Assign {
           nodesList);
       return replicaPositions;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new AssignmentException("Error getting replica locations : " + e.getMessage(), e);
     } finally {
       if (log.isTraceEnabled()) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java
index e6ca89c..6323168 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java
@@ -17,13 +17,29 @@
 
 package org.apache.solr.cloud.api.collections;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Properties;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicReference;
+
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.AlreadyExistsException;
 import org.apache.solr.client.solrj.cloud.autoscaling.PolicyHelper;
 import org.apache.solr.client.solrj.cloud.autoscaling.VersionedData;
 import org.apache.solr.client.solrj.impl.BaseCloudSolrClient;
-import org.apache.solr.cloud.LeaderElector;
 import org.apache.solr.cloud.Overseer;
 import org.apache.solr.cloud.ZkController;
 import org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler.ShardRequestTracker;
@@ -75,22 +91,6 @@ import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
 import static org.apache.solr.common.params.CommonAdminParams.WAIT_FOR_FINAL_STATE;
 import static org.apache.solr.common.params.CommonParams.NAME;
 import static org.apache.solr.common.util.StrUtils.formatString;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Properties;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicReference;
 
 public class CreateCollectionCmd implements OverseerCollectionMessageHandler.Cmd {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -407,7 +407,7 @@ public class CreateCollectionCmd implements OverseerCollectionMessageHandler.Cmd
       }
 
     } catch (InterruptedException ex) {
-      ParWork.propegateInterrupt(ex);
+      ParWork.propagateInterrupt(ex);
       throw ex;
     } catch (SolrException ex) {
       log.error("Exception creating collections", ex);
@@ -649,7 +649,7 @@ public class CreateCollectionCmd implements OverseerCollectionMessageHandler.Cmd
     try {
       configManager.copyConfigDir(ConfigSetsHandlerApi.DEFAULT_CONFIGSET_NAME, targetConfig, new HashSet<>());
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.INVALID_STATE, "Error while copying _default to " + targetConfig, e);
     }
   }
@@ -735,7 +735,7 @@ public class CreateCollectionCmd implements OverseerCollectionMessageHandler.Cmd
               ZkStateReader.emptyJson, CreateMode.PERSISTENT, false);
 
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "createCollectionZkNode(DistribStateManager=" + stateManager + ", String=" + collection + ", Map<String,String>=" + params + ")", e);
     }
 
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteNodeCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteNodeCmd.java
index 9f93186..b153b53 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteNodeCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteNodeCmd.java
@@ -22,8 +22,6 @@ import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
 
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.cloud.ClusterState;
@@ -124,7 +122,7 @@ public class DeleteNodeCmd implements OverseerCollectionMessageHandler.Cmd {
           } catch (KeeperException e) {
             log.warn("Error deleting ", e);
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
           }catch (Exception e) {
             log.warn("Error deleting ", e);
             throw e;
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteReplicaCmd.java
index 0024c55..84888a0 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteReplicaCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteReplicaCmd.java
@@ -16,6 +16,14 @@
  */
 package org.apache.solr.cloud.api.collections;
 
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler.Cmd;
 import org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler.ShardRequestTracker;
 import org.apache.solr.common.ParWork;
@@ -42,13 +50,6 @@ import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
 import static org.apache.solr.common.params.CollectionAdminParams.COUNT_PROP;
 import static org.apache.solr.common.params.CollectionAdminParams.FOLLOW_ALIASES;
 import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
 
 
 public class DeleteReplicaCmd implements Cmd {
@@ -260,7 +261,7 @@ public class DeleteReplicaCmd implements Cmd {
         }
         ocmh.deleteCoreNode(collectionName, replicaName, replica, core);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         results.add("failure", "Could not complete delete " + e.getMessage());
       } finally {
         if (onComplete != null) onComplete.run();
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteShardCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteShardCmd.java
index ea84ea2..57378d5 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteShardCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/DeleteShardCmd.java
@@ -17,14 +17,6 @@
  */
 package org.apache.solr.cloud.api.collections;
 
-import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.NODE_NAME_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
-import static org.apache.solr.common.params.CollectionAdminParams.FOLLOW_ALIASES;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICA;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETESHARD;
-import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
-
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -53,6 +45,14 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.NODE_NAME_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
+import static org.apache.solr.common.params.CollectionAdminParams.FOLLOW_ALIASES;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICA;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETESHARD;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+
 public class DeleteShardCmd implements OverseerCollectionMessageHandler.Cmd {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private final OverseerCollectionMessageHandler ocmh;
@@ -136,7 +136,7 @@ public class DeleteShardCmd implements OverseerCollectionMessageHandler.Cmd {
           cleanupLatch.countDown();
           throw e;
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Error deleting replica: {}", r, e);
           cleanupLatch.countDown();
           throw e;
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/MaintainRoutedAliasCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/MaintainRoutedAliasCmd.java
index c7b74c5..c6a3f8e 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/MaintainRoutedAliasCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/MaintainRoutedAliasCmd.java
@@ -130,7 +130,7 @@ public class MaintainRoutedAliasCmd extends AliasCmd {
                 try {
                   deleteTargetCollection(clusterState, results, aliasName, aliasesManager, action);
                 } catch (Exception e) {
-                  ParWork.propegateInterrupt(e);
+                  ParWork.propagateInterrupt(e);
                   log.warn("Deletion of {} by {} {} failed (this might be ok if two clients were"
                           , action.targetCollection, ra.getAliasName()
                           , " writing to a routed alias at the same time and both caused a deletion)");
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/MigrateCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/MigrateCmd.java
index 0dc3377..7b3b2ff 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/MigrateCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/MigrateCmd.java
@@ -154,7 +154,7 @@ public class MigrateCmd implements OverseerCollectionMessageHandler.Cmd {
         ocmh.commandMap.get(DELETE).call(zkStateReader.getClusterState(), new ZkNodeProps(props), results);
         clusterState = zkStateReader.getClusterState();
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.warn("Unable to clean up existing temporary collection: {}", tempSourceCollectionName, e);
       }
     }
@@ -377,7 +377,7 @@ public class MigrateCmd implements OverseerCollectionMessageHandler.Cmd {
           NAME, tempSourceCollectionName);
       ocmh.commandMap.get(DELETE). call(zkStateReader.getClusterState(), new ZkNodeProps(props), results);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Unable to delete temporary collection: {}. Please remove it manually", tempSourceCollectionName, e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java
index 9a2f8c7..e092174 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java
@@ -232,7 +232,7 @@ public class MoveReplicaCmd implements OverseerCollectionMessageHandler.Cmd {
     try {
       ocmh.addReplica(ocmh.zkStateReader.getClusterState(), addReplicasProps, addResult, null);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       // fatal error - try rolling back
       String errorString = String.format(Locale.ROOT, "Failed to create replica for collection=%s shard=%s" +
           " on node=%s, failure=%s", coll.getName(), slice.getName(), targetNode, addResult.get("failure"));
@@ -261,7 +261,7 @@ public class MoveReplicaCmd implements OverseerCollectionMessageHandler.Cmd {
       try {
         ocmh.addReplica(ocmh.zkStateReader.getClusterState(), addReplicasProps, rollback, null);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Fatal error during MOVEREPLICA of " + replica
             + ", collection may be inconsistent!", e);
       }
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java
index 3878cd0..9703b4f 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/OverseerCollectionMessageHandler.java
@@ -16,6 +16,22 @@
  */
 package org.apache.solr.cloud.api.collections;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicReference;
+
 import com.google.common.collect.ImmutableMap;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.solr.client.solrj.SolrResponse;
@@ -67,7 +83,6 @@ import org.apache.solr.common.util.TimeSource;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.handler.component.HttpShardHandlerFactory;
 import org.apache.solr.handler.component.ShardHandler;
-import org.apache.solr.handler.component.ShardHandlerFactory;
 import org.apache.solr.handler.component.ShardRequest;
 import org.apache.solr.handler.component.ShardResponse;
 import org.apache.solr.logging.MDCLoggingContext;
@@ -95,61 +110,10 @@ import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
 import static org.apache.solr.common.params.CollectionAdminParams.COLLECTION;
 import static org.apache.solr.common.params.CollectionAdminParams.COLOCATED_WITH;
 import static org.apache.solr.common.params.CollectionAdminParams.WITH_COLLECTION;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICAPROP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDROLE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.ALIASPROP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.BACKUP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.BALANCESHARDUNIQUE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATEALIAS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATESHARD;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATESNAPSHOT;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEALIAS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETENODE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICA;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICAPROP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETESHARD;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETESNAPSHOT;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MAINTAINROUTEDALIAS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MIGRATE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MIGRATESTATEFORMAT;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOCK_COLL_TASK;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOCK_REPLICA_TASK;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOCK_SHARD_TASK;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MODIFYCOLLECTION;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.OVERSEERSTATUS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.REBALANCELEADERS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.REINDEXCOLLECTION;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.RELOAD;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.REMOVEROLE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.RENAME;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.REPLACENODE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.RESTORE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.SPLITSHARD;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.UTILIZENODE;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.*;
 import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
 import static org.apache.solr.common.params.CommonParams.NAME;
 import static org.apache.solr.common.util.Utils.makeMap;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicReference;
 
 /**
  * A {@link OverseerMessageHandler} that handles Collections API related
@@ -301,7 +265,7 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler,
             + operation);
       }
     }  catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw e;
     } catch (Exception e) {
       String collName = message.getStr("collection");
@@ -551,7 +515,7 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler,
       updateResponse = softCommit(coreUrl, overseer.getCoreContainer().getUpdateShardHandler().getTheSharedHttpClient());
       processResponse(results, null, coreUrl, updateResponse, slice, Collections.emptySet());
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       processResponse(results, e, coreUrl, updateResponse, slice, Collections.emptySet());
       throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to call distrib softCommit on: " + coreUrl, e);
     }
@@ -599,7 +563,7 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler,
         error = "Timeout waiting for collection state.";
       throw new ZkController.NotInClusterStateException(ErrorCode.SERVER_ERROR, error);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Interrupted", e);
     }
 
@@ -622,7 +586,7 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler,
       String error = "Timeout waiting for new shard.";
       throw new ZkController.NotInClusterStateException(ErrorCode.SERVER_ERROR, error);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Interrupted", e);
     }
   }
@@ -699,7 +663,7 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler,
         return true;
       });
     } catch (TimeoutException | InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("modifyCollection(ClusterState=" + clusterState + ", ZkNodeProps=" + message + ", NamedList=" + results + ")", e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Could not modify collection " + message, e);
     }
@@ -761,7 +725,7 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler,
         error = "Timeout waiting for collection state.";
       throw new SolrException(ErrorCode.SERVER_ERROR, error);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Interrupted", e);
     }
     return result.get();
@@ -916,7 +880,7 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler,
           } catch (KeeperException e) {
             log.error("ZooKeeper exception", e);
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             return;
           }
           if (rstats2 != null) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java
index 7bcc074..80e186c 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java
@@ -411,7 +411,7 @@ public class ReindexCollectionCmd implements OverseerCollectionMessageHandler.Cm
       try {
         rsp = ocmh.cloudManager.request(new QueryRequest(q));
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to copy documents from " +
             collection + " to " + targetCollection, e);
       }
@@ -499,7 +499,7 @@ public class ReindexCollectionCmd implements OverseerCollectionMessageHandler.Cm
       reindexingState.put(PHASE, "done");
       removeReindexingState(collection);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Error during reindexing of {}", extCollection, e);
       exc = e;
       aborted = true;
@@ -557,7 +557,7 @@ public class ReindexCollectionCmd implements OverseerCollectionMessageHandler.Cm
       QueryResponse rsp = solrClient.query(collection, params);
       return rsp.getResults().getNumFound();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       return 0L;
     }
   }
@@ -673,7 +673,7 @@ public class ReindexCollectionCmd implements OverseerCollectionMessageHandler.Cm
             }
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Exception waiting for daemon " +
               daemonName + " at " + daemonUrl, e);
         }
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReplaceNodeCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReplaceNodeCmd.java
index b7c5cb4..c170a98 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReplaceNodeCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReplaceNodeCmd.java
@@ -222,7 +222,7 @@ public class ReplaceNodeCmd implements OverseerCollectionMessageHandler.Cmd {
           cleanupLatch.countDown();
           log.warn("Error deleting replica ", e);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Error deleting replica ", e);
           cleanupLatch.countDown();
           throw e;
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/RoutedAlias.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/RoutedAlias.java
index ffb3d1e..30b5920 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/RoutedAlias.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/RoutedAlias.java
@@ -265,7 +265,7 @@ public abstract class RoutedAlias {
     } catch (SolrException e) {
       throw e;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
     }
   }
@@ -364,7 +364,7 @@ public abstract class RoutedAlias {
         try {
           ensureCollection(targetCollectionDesc.creationCollection, coreContainer);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("Async creation of a collection for routed Alias {} failed!", this.getAliasName(), e);
         }
       }, core);
@@ -409,7 +409,7 @@ public abstract class RoutedAlias {
     } catch (RuntimeException e) {
       throw e;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
index 0976889..3298996 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/SplitShardCmd.java
@@ -148,7 +148,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
     try {
       parentShardLeader = zkStateReader.getLeaderRetry(collectionName, slice.get(), 10000);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Interrupted.", e);
     }
 
@@ -271,7 +271,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
             try {
               ocmh.commandMap.get(DELETESHARD).call(clusterState, m, new NamedList());
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to delete already existing sub shard: " + subSlice,
                   e);
             }
@@ -614,7 +614,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
     } catch (SolrException e) {
       throw e;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Error executing split operation for collection: {} parent shard: {}", collectionName, slice, e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, null, e);
     } finally {
@@ -735,7 +735,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
         ZkNodeProps m = new ZkNodeProps(propMap);
         ocmh.overseer.offerStateUpdate(Utils.toJSON(m));
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         // don't give up yet - just log the error, we may still be able to clean up
         log.warn("Cleanup failed after failed split of {}/{}: (slice state changes)", collectionName, parentShard, e);
       }
@@ -756,7 +756,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
       try {
         ocmh.commandMap.get(DELETESHARD).call(clusterState, m, new NamedList<Object>());
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.warn("Cleanup failed after failed split of {}/{} : (deleting existing sub shard{})", collectionName, parentShard, subSlice, e);
       }
     }
@@ -808,7 +808,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
     try {
       fuzz = Float.parseFloat(fuzzStr);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid numeric value of 'fuzz': " + fuzzStr);
     }
 
@@ -827,7 +827,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
           try {
             subRanges.add(DocRouter.DEFAULT.fromString(r));
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Exception in parsing hexadecimal hash range: " + r, e);
           }
           if (!subRanges.get(i).isSubsetOf(range)) {
@@ -907,7 +907,7 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
       try {
         cloudManager.getDistribStateManager().makePath(path, data, CreateMode.EPHEMERAL, true);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.INVALID_STATE, "Can't lock parent slice for splitting (another split operation running?): " +
             collection + "/" + shard, e);
       }
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/TimeRoutedAlias.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/TimeRoutedAlias.java
index 18b3d71..c4f168a 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/TimeRoutedAlias.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/TimeRoutedAlias.java
@@ -169,7 +169,7 @@ public class TimeRoutedAlias extends RoutedAlias {
         throw new SolrException(BAD_REQUEST, "duration must add to produce a time in the future");
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(BAD_REQUEST, "bad " + TimeRoutedAlias.ROUTER_INTERVAL + ", " + e, e);
     }
 
@@ -180,7 +180,7 @@ public class TimeRoutedAlias extends RoutedAlias {
           throw new SolrException(BAD_REQUEST, "duration must round or subtract to produce a time in the past");
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(BAD_REQUEST, "bad " + TimeRoutedAlias.ROUTER_AUTO_DELETE_AGE + ", " + e, e);
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java
index c631835..3b619b2 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java
@@ -174,7 +174,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
       }
 
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       rsp.getValues().add("result", "failure");
       throw e;
     } finally {
@@ -285,7 +285,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
     try {
       cp = clusterPolicy.stream().map(Clause::create).collect(Collectors.toList());
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       op.addError(e.getMessage());
       return currentConfig;
     }
@@ -306,7 +306,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
     try {
       prefs = preferences.stream().map(Preference::new).collect(Collectors.toList());
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       op.addError(e.getMessage());
       return currentConfig;
     }
@@ -356,7 +356,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
       newClauses = Policy.clausesFromMap((Map<String, List<Map<String, Object>>>) op.getCommandData(),
           new ArrayList<>() );
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       op.addError(e.getMessage());
       return currentConfig;
     }
@@ -508,7 +508,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
       op.addError("invalid listener configuration: " + e.toString());
       return currentConfig;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("error loading listener class ", e);
       op.addError("Listener not found: " + listenerClass + ". error message:" + e.getMessage());
       return currentConfig;
@@ -578,7 +578,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
       try {
         loader.findClass(klass, TriggerAction.class);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.warn("Could not load class : ", e);
         op.addError("Action not found: " + klass + " " + e.getMessage());
         return currentConfig;
@@ -590,7 +590,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
     try {
       t = triggerFactory.create(trigger.event, trigger.name, trigger.properties);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("", e);
       op.addError("Error validating trigger config " + trigger.name + ": " + e.toString());
       return currentConfig;
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java
index afba1e2..039ebb8 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java
@@ -17,6 +17,19 @@
 
 package org.apache.solr.cloud.autoscaling;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
@@ -39,18 +52,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.cloud.autoscaling.TriggerEvent.NODE_NAMES;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.function.Predicate;
-import java.util.stream.Collectors;
 
 /**
  * This class is responsible for using the configured policy and preferences
@@ -190,7 +191,7 @@ public class ComputePlanAction extends TriggerActionBase {
         releasePolicySession(sessionWrapper, session);
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
           "Unexpected exception while processing event: " + event, e);
     }
@@ -223,7 +224,7 @@ public class ComputePlanAction extends TriggerActionBase {
     try {
       return Integer.parseInt(String.valueOf(o));
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Invalid '{}' event property: {}, using default {}", AutoScalingParams.MAX_COMPUTE_OPERATIONS, o, maxOp);
       return maxOp;
     }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java
index f9562b5..8a8a8a9 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java
@@ -33,9 +33,9 @@ import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrResponse;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.cloud.autoscaling.AlreadyExistsException;
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
+import org.apache.solr.client.solrj.cloud.autoscaling.AlreadyExistsException;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.response.RequestStatusState;
 import org.apache.solr.common.ParWork;
@@ -140,7 +140,7 @@ public class ExecutePlanAction extends TriggerActionBase {
                 try {
                   cloudManager.getDistribStateManager().removeData(znode, -1);
                 } catch (Exception e) {
-                  ParWork.propegateInterrupt(e);
+                  ParWork.propagateInterrupt(e);
                   log.warn("Unexpected exception while trying to delete znode: {}", znode, e);
                 }
               }
@@ -160,7 +160,7 @@ public class ExecutePlanAction extends TriggerActionBase {
               try {
                 cloudManager.getDistribStateManager().removeData(znode, -1);
               } catch (Exception e) {
-                ParWork.propegateInterrupt(e);
+                ParWork.propagateInterrupt(e);
                 log.warn("Unexpected exception while trying to delete znode: {}", znode, e);
               }
               throw new IOException("Task " + asyncId + " failed: " + (statusResponse != null ? statusResponse : " timed out. Operation: " + req));
@@ -182,16 +182,16 @@ public class ExecutePlanAction extends TriggerActionBase {
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
               "Unexpected exception executing operation: " + operation.getParams(), e);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "ExecutePlanAction was interrupted", e);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
               "Unexpected exception executing operation: " + operation.getParams(), e);
         }
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
           "Unexpected exception while processing event: " + event, e);
     }
@@ -217,7 +217,7 @@ public class ExecutePlanAction extends TriggerActionBase {
           return statusResponse;
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         Throwable rootCause = ExceptionUtils.getRootCause(e);
         if (rootCause instanceof IllegalStateException && rootCause.getMessage().contains("Connection pool shut down"))  {
           throw e;
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveMarkersPlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveMarkersPlanAction.java
index c081b82..deb98c0 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveMarkersPlanAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveMarkersPlanAction.java
@@ -16,6 +16,17 @@
  */
 package org.apache.solr.cloud.autoscaling;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.BadVersionException;
@@ -30,16 +41,6 @@ import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_ACTIVE;
 import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_STATE;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.concurrent.TimeUnit;
 
 /**
  * This plan simply removes nodeAdded and nodeLost markers from Zookeeper if their TTL has
@@ -69,7 +70,7 @@ public class InactiveMarkersPlanAction extends TriggerActionBase {
     try {
       cleanupTTL = Integer.parseInt(cleanupStr);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new TriggerValidationException(getName(), TTL_PROP, "invalid value '" + cleanupStr + "': " + e.toString());
     }
     if (cleanupTTL < 0) {
@@ -135,7 +136,7 @@ public class InactiveMarkersPlanAction extends TriggerActionBase {
           log.trace(" -- keep {}, delta={}, ttl={}, active={}", markerPath, delta, cleanupTTL, activeMarker);
         }
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         return;
       } catch (IOException | KeeperException e) {
         log.warn("Could not cleanup marker at {}, skipping... ({}}", markerPath, e.getMessage());
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java
index 93a0fc9..b5815ef 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java
@@ -34,7 +34,6 @@ import org.apache.solr.common.ParWork;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.params.AutoScalingParams;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrResourceLoader;
 import org.slf4j.Logger;
@@ -70,7 +69,7 @@ public class InactiveShardPlanAction extends TriggerActionBase {
     try {
       cleanupTTL = Integer.parseInt(cleanupStr);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new TriggerValidationException(getName(), TTL_PROP, "invalid value '" + cleanupStr + "': " + e.toString());
     }
     if (cleanupTTL < 0) {
@@ -151,7 +150,7 @@ public class InactiveShardPlanAction extends TriggerActionBase {
             }
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Exception checking for inactive shard split locks in {}", parentPath, e);
         }
       })
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
index d80121d..76665ac 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
@@ -24,13 +24,13 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.Locale;
 
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
@@ -121,7 +121,7 @@ public class IndexSizeTrigger extends TriggerBase {
         throw new Exception("value must be > 0");
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new TriggerValidationException(getName(), ABOVE_BYTES_PROP, "invalid value '" + aboveStr + "': " + e.toString());
     }
     try {
@@ -130,7 +130,7 @@ public class IndexSizeTrigger extends TriggerBase {
         belowBytes = -1;
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new TriggerValidationException(getName(), BELOW_BYTES_PROP, "invalid value '" + belowStr + "': " + e.toString());
     }
     // below must be at least 2x smaller than above, otherwise splitting a shard
@@ -148,7 +148,7 @@ public class IndexSizeTrigger extends TriggerBase {
         throw new Exception("value must be > 0");
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new TriggerValidationException(getName(), ABOVE_DOCS_PROP, "invalid value '" + aboveStr + "': " + e.toString());
     }
     try {
@@ -157,7 +157,7 @@ public class IndexSizeTrigger extends TriggerBase {
         belowDocs = -1;
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new TriggerValidationException(getName(), BELOW_DOCS_PROP, "invalid value '" + belowStr + "': " + e.toString());
     }
     // below must be at least 2x smaller than above, otherwise splitting a shard
@@ -189,7 +189,7 @@ public class IndexSizeTrigger extends TriggerBase {
         throw new Exception("must be > 1");
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new TriggerValidationException(getName(), MAX_OPS_PROP, "invalid value: '" + maxOpsStr + "': " + e.getMessage());
     }
     String methodStr = (String)properties.getOrDefault(SPLIT_METHOD_PROP, SolrIndexSplitter.SplitMethod.LINK.toLower());
@@ -201,14 +201,14 @@ public class IndexSizeTrigger extends TriggerBase {
     try {
       splitFuzz = Float.parseFloat(fuzzStr);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new TriggerValidationException(getName(), SPLIT_FUZZ_PROP, "invalid value: '" + fuzzStr + "': " + e.getMessage());
     }
     String splitByPrefixStr = String.valueOf(properties.getOrDefault(SPLIT_BY_PREFIX, false));
     try {
       splitByPrefix = getValidBool(splitByPrefixStr);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new TriggerValidationException(getName(), SPLIT_BY_PREFIX, "invalid value: '" + splitByPrefixStr + "': " + e.getMessage());
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java
index e8d1cab..0691176 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java
@@ -89,7 +89,7 @@ public class NodeAddedTrigger extends TriggerBase {
             return;
           }
         } catch (InterruptedException | IOException | KeeperException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.debug("-- ignoring marker {} state due to error{}", markerPath, e);
         }
         // don't add nodes that have since gone away
@@ -102,7 +102,7 @@ public class NodeAddedTrigger extends TriggerBase {
     } catch (NoSuchElementException e) {
       // ignore
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Exception retrieving nodeLost markers", e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java
index 5544717..ce468f3b 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java
@@ -21,7 +21,6 @@ import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -50,7 +49,6 @@ import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_ACT
 import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_INACTIVE;
 import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_STATE;
 import static org.apache.solr.common.params.AutoScalingParams.PREFERRED_OP;
-import static org.apache.solr.common.params.AutoScalingParams.REPLICA_TYPE;
 
 /**
  * Trigger for the {@link TriggerEventType#NODELOST} event
@@ -88,7 +86,7 @@ public class NodeLostTrigger extends TriggerBase {
             return;
           }
         } catch (InterruptedException | IOException | KeeperException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.debug("-- ignoring marker {} state due to error", markerPath, e);
         }
         // don't add nodes that have since came back
@@ -101,7 +99,7 @@ public class NodeLostTrigger extends TriggerBase {
     } catch (NoSuchElementException | AlreadyClosedException e) {
       // ignore
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Exception retrieving nodeLost markers", e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/OverseerTriggerThread.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/OverseerTriggerThread.java
index 2ae0b86..b552218 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/OverseerTriggerThread.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/OverseerTriggerThread.java
@@ -17,6 +17,19 @@
 
 package org.apache.solr.cloud.autoscaling;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
@@ -37,18 +50,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.common.cloud.ZkStateReader.SOLR_AUTOSCALING_CONF_PATH;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.locks.Condition;
-import java.util.concurrent.locks.ReentrantLock;
 
 /**
  * Overseer thread responsible for reading triggers from zookeeper and
@@ -164,7 +165,7 @@ public class OverseerTriggerThread implements Runnable, SolrCloseable {
         // somebody else has changed the configuration so we must retry
       } catch (InterruptedException e) {
         // Restore the interrupted status
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         return;
       }
       catch (IOException | KeeperException e) {
@@ -186,10 +187,10 @@ public class OverseerTriggerThread implements Runnable, SolrCloseable {
     } catch (IOException e) {
       log.error("IO error: [{}]", e);
     } catch (InterruptedException | AlreadyClosedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       return;
     } catch (Exception e)  {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Unexpected exception", e);
     }
 
@@ -227,7 +228,7 @@ public class OverseerTriggerThread implements Runnable, SolrCloseable {
           updateLock.unlock();
         }
       } catch (InterruptedException | AlreadyClosedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         return;
       }
      
@@ -251,7 +252,7 @@ public class OverseerTriggerThread implements Runnable, SolrCloseable {
           log.info("already closed");
           return;
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           if (e instanceof KeeperException.SessionExpiredException || e instanceof InterruptedException) {
             log.error("", e);
             return;
@@ -264,7 +265,7 @@ public class OverseerTriggerThread implements Runnable, SolrCloseable {
         deactivateMarkers(ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH);
         deactivateMarkers(ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH);
       } catch (InterruptedException | AlreadyClosedException e) {
-        ParWork.propegateInterrupt(e, true);
+        ParWork.propagateInterrupt(e, true);
         return;
       } catch (KeeperException e) {
         log.error("", e);
@@ -272,7 +273,7 @@ public class OverseerTriggerThread implements Runnable, SolrCloseable {
           return;
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Exception deactivating markers", e);
       }
 
@@ -312,7 +313,7 @@ public class OverseerTriggerThread implements Runnable, SolrCloseable {
       } catch (IOException e) {
         log.warn("IO Error: [{}]", e);
       } catch (InterruptedException | AlreadyClosedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       } catch (Exception e)  {
         log.error("Unexpected exception", e);
       }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTrigger.java
index 8d48b54..37f8cb2 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTrigger.java
@@ -133,7 +133,7 @@ public class ScheduledTrigger extends TriggerBase {
     try {
       return Instant.from(dateTimeFormatter.parse(startTimeStr));
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new TriggerValidationException("startTime", "error parsing startTime '" + startTimeStr + "': " + e.toString());
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java
index 1bff431..4afa3dd 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java
@@ -57,9 +57,9 @@ import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.common.util.ObjectReleaseTracker;
+import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrResourceLoader;
-import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -69,7 +69,6 @@ import static org.apache.solr.common.params.AutoScalingParams.ACTION_THROTTLE_PE
 import static org.apache.solr.common.params.AutoScalingParams.TRIGGER_COOLDOWN_PERIOD_SECONDS;
 import static org.apache.solr.common.params.AutoScalingParams.TRIGGER_CORE_POOL_SIZE;
 import static org.apache.solr.common.params.AutoScalingParams.TRIGGER_SCHEDULE_DELAY_SECONDS;
-import static org.apache.solr.common.util.ExecutorUtil.awaitTermination;
 
 /**
  * Responsible for scheduling active triggers, starting and stopping them and
@@ -222,7 +221,7 @@ public class ScheduledTriggers implements Closeable {
     try {
       st = new TriggerWrapper(newTrigger, cloudManager, queueStats);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       if (cloudManager.isClosed()) {
         log.error("Failed to add trigger {} - closing or disconnected from data provider", newTrigger.getName(), e);
       } else {
@@ -326,7 +325,7 @@ public class ScheduledTriggers implements Closeable {
                 try {
                   action.process(event, actionContext);
                 } catch (Exception e) {
-                  ParWork.propegateInterrupt(e);
+                  ParWork.propagateInterrupt(e);
                   triggerListeners1.fireListeners(event.getSource(), event, TriggerEventProcessorStage.FAILED, action.getName(), actionContext, e, null);
                   throw new TriggerActionException(event.getSource(), action.getName(), "Error processing action for trigger event: " + event, e);
                 }
@@ -343,7 +342,7 @@ public class ScheduledTriggers implements Closeable {
             } catch (TriggerActionException e) {
               log.warn("Exception executing actions", e);
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               triggerListeners1.fireListeners(event.getSource(), event, TriggerEventProcessorStage.FAILED);
               log.warn("Unhandled exception executing actions", e);
             } finally {
@@ -445,7 +444,7 @@ public class ScheduledTriggers implements Closeable {
                     }
                   }
                 } catch (Exception e) {
-                  ParWork.propegateInterrupt(e);
+                  ParWork.propagateInterrupt(e);
                   if (cloudManager.isClosed())  {
                     throw e; // propagate the abort to the caller
                   }
@@ -464,10 +463,10 @@ public class ScheduledTriggers implements Closeable {
         }
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Thread interrupted", e);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       // we catch but don't rethrow because a failure to wait for pending tasks
       // should not keep the actions from executing
       log.error("Unexpected exception while waiting for pending tasks to finish", e);
@@ -503,13 +502,13 @@ public class ScheduledTriggers implements Closeable {
     try {
       stateManager.removeRecursively(statePath, true, true);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Failed to remove state for removed trigger {}", statePath, e);
     }
     try {
       stateManager.removeRecursively(eventsPath, true, true);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Failed to remove events for removed trigger {}", eventsPath, e);
     }
   }
@@ -628,7 +627,7 @@ public class ScheduledTriggers implements Closeable {
               try {
                 trigger.restoreState();
               } catch (Exception e) {
-                ParWork.propegateInterrupt(e);
+                ParWork.propagateInterrupt(e);
                 // log but don't throw - see below
                 log.error("Error restoring trigger state {}", trigger.getName(), e);
               }
@@ -639,7 +638,7 @@ public class ScheduledTriggers implements Closeable {
           } catch (KeeperException.NoNodeException e) {
               log.info("No node found for {}", e.getPath());
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             log.error("Unexpected exception from trigger: {}", trigger.getName(), e);
             return;
           }
@@ -648,7 +647,7 @@ public class ScheduledTriggers implements Closeable {
           } catch (AlreadyClosedException e) {
             return;
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             // log but do not propagate exception because an exception thrown from a scheduled operation
             // will suppress future executions
             log.error("Unexpected exception from trigger: {}", trigger.getName(), e);
@@ -748,7 +747,7 @@ public class ScheduledTriggers implements Closeable {
             try {
               listener.close();
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               log.warn("Exception closing old listener {}", listener.getConfig(), e);
             }
             it.remove();
@@ -768,7 +767,7 @@ public class ScheduledTriggers implements Closeable {
               try {
                 oldListener.close();
               } catch (Exception e) {
-                ParWork.propegateInterrupt(e);
+                ParWork.propagateInterrupt(e);
                 log.warn("Exception closing old listener {}", oldListener.getConfig(), e);
               }
             } else {
@@ -780,7 +779,7 @@ public class ScheduledTriggers implements Closeable {
             try {
               listener = loader.newInstance(clazz, TriggerListener.class);
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               log.warn("Invalid TriggerListener class name '{}', skipping...", clazz, e);
             }
             if (listener != null) {
@@ -789,7 +788,7 @@ public class ScheduledTriggers implements Closeable {
                 listener.init();
                 listenersPerName.put(config.name, listener);
               } catch (Exception e) {
-                ParWork.propegateInterrupt(e);
+                ParWork.propagateInterrupt(e);
                 log.warn("Error initializing TriggerListener {}", config, e);
                 IOUtils.closeQuietly(listener);
                 listener = null;
@@ -896,7 +895,7 @@ public class ScheduledTriggers implements Closeable {
           try {
             listener.onEvent(event, stage, actionName, context, error, message);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             log.warn("Exception running listener {}", listener.getConfig(), e);
           }
         }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/SearchRateTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/SearchRateTrigger.java
index 0df3094..24c0b19 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/SearchRateTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/SearchRateTrigger.java
@@ -32,9 +32,9 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.util.concurrent.AtomicDouble;
+import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.Policy;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
-import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.Suggester;
 import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventType;
 import org.apache.solr.common.ParWork;
@@ -158,7 +158,7 @@ public class SearchRateTrigger extends TriggerBase {
     try {
       maxOps = Integer.parseInt(maxOpsStr);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new TriggerValidationException(name, MAX_OPS_PROP, "invalid value '" + maxOpsStr + "': " + e.toString());
     }
 
@@ -170,7 +170,7 @@ public class SearchRateTrigger extends TriggerBase {
           throw new Exception("must be at least 1, or not set to use 'replicationFactor'");
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new TriggerValidationException(name, MIN_REPLICAS_PROP, "invalid value '" + o + "': " + e.toString());
       }
     }
@@ -189,7 +189,7 @@ public class SearchRateTrigger extends TriggerBase {
       try {
         aboveRate = Double.parseDouble(String.valueOf(above));
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new TriggerValidationException(name, ABOVE_RATE_PROP, "Invalid configuration value: '" + above + "': " + e.toString());
       }
     } else {
@@ -199,7 +199,7 @@ public class SearchRateTrigger extends TriggerBase {
       try {
         belowRate = Double.parseDouble(String.valueOf(below));
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new TriggerValidationException(name, BELOW_RATE_PROP, "Invalid configuration value: '" + below + "': " + e.toString());
       }
     } else {
@@ -213,7 +213,7 @@ public class SearchRateTrigger extends TriggerBase {
       try {
         aboveNodeRate = Double.parseDouble(String.valueOf(above));
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new TriggerValidationException(name, ABOVE_NODE_RATE_PROP, "Invalid configuration value: '" + above + "': " + e.toString());
       }
     } else {
@@ -223,7 +223,7 @@ public class SearchRateTrigger extends TriggerBase {
       try {
         belowNodeRate = Double.parseDouble(String.valueOf(below));
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new TriggerValidationException(name, BELOW_NODE_RATE_PROP, "Invalid configuration value: '" + below + "': " + e.toString());
       }
     } else {
@@ -246,7 +246,7 @@ public class SearchRateTrigger extends TriggerBase {
     try {
       aboveNodeOp = CollectionParams.CollectionAction.get(String.valueOf(aboveNodeObj));
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new TriggerValidationException(getName(), ABOVE_NODE_OP_PROP, "unrecognized value: '" + aboveNodeObj + "'");
     }
     if (belowNodeObj != null) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/SystemLogListener.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/SystemLogListener.java
index 8f1cb19..d978d6f 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/SystemLogListener.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/SystemLogListener.java
@@ -30,8 +30,8 @@ import java.util.Set;
 import java.util.StringJoiner;
 
 import org.apache.solr.client.solrj.SolrRequest;
-import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
+import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
 import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventProcessorStage;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.common.ParWork;
@@ -127,7 +127,7 @@ public class SystemLogListener extends TriggerListenerBase {
       req.setParam(CollectionAdminParams.COLLECTION, collection);
       cloudManager.request(req);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       if ((e instanceof SolrException) && e.getMessage().contains("Collection not found")) {
         // relatively benign but log this - collection still existed when we started
         log.info("Collection {} missing, skip sending event {}", collection, event);
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java
index aa39aba..bd11c04 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java
@@ -16,6 +16,18 @@
  */
 package org.apache.solr.cloud.autoscaling;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
+
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.AlreadyExistsException;
@@ -31,18 +43,6 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicReference;
-
 /**
  * Base class for {@link org.apache.solr.cloud.autoscaling.AutoScaling.Trigger} implementations.
  * It handles state snapshot / restore in ZK.
@@ -119,7 +119,7 @@ public abstract class TriggerBase implements AutoScaling.Trigger {
         try {
           action = loader.newInstance((String)map.get("class"), TriggerAction.class, "cloud.autoscaling.");
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("", e);
           throw new TriggerValidationException("action", "exception creating action " + map + ": " + e.toString());
         }
@@ -148,7 +148,7 @@ public abstract class TriggerBase implements AutoScaling.Trigger {
     } catch (AlreadyExistsException e) {
       // ignore
     } catch (InterruptedException | KeeperException | IOException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw e;
     }
     for (TriggerAction action : actions) {
@@ -263,7 +263,7 @@ public abstract class TriggerBase implements AutoScaling.Trigger {
       stateManager.setData(path, data, -1);
       lastState = state;
     } catch (InterruptedException | BadVersionException | IOException | KeeperException e) {
-      ParWork.propegateInterrupt(e, true);
+      ParWork.propagateInterrupt(e, true);
       log.warn("Exception updating trigger state '{}'", path, e);
     }
   }
@@ -281,7 +281,7 @@ public abstract class TriggerBase implements AutoScaling.Trigger {
     } catch (AlreadyClosedException e) {
      
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e, true);
+      ParWork.propagateInterrupt(e, true);
       log.warn("Exception getting trigger state '{}'", path, e);
     }
     if (data != null) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java
index d6e39c4..5ef75f8 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java
@@ -27,8 +27,8 @@ import org.apache.solr.cloud.Stats;
 import org.apache.solr.common.AlreadyClosedException;
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.util.Utils;
 import org.apache.solr.common.util.TimeSource;
+import org.apache.solr.common.util.Utils;
 import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -60,10 +60,10 @@ public class TriggerEventQueue {
       delegate.offer(data);
       return true;
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e, true);
+      ParWork.propagateInterrupt(e, true);
       throw new AlreadyClosedException();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Exception adding event {} to queue {}", event, triggerName, e);
       return false;
     }
@@ -82,13 +82,13 @@ public class TriggerEventQueue {
           Map<String, Object> map = (Map<String, Object>) Utils.fromJSON(data);
           return fromMap(map);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Invalid event data, ignoring: {}", new String(data, StandardCharsets.UTF_8));
           continue;
         }
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e, true);
+      ParWork.propagateInterrupt(e, true);
       throw new AlreadyClosedException();
     } catch (KeeperException.NoNodeException e) {
       log.info("No node found for {}", e.getPath());
@@ -112,16 +112,16 @@ public class TriggerEventQueue {
           Map<String, Object> map = (Map<String, Object>) Utils.fromJSON(data);
           return fromMap(map);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Invalid event data, ignoring: {}", new String(data, StandardCharsets.UTF_8));
           continue;
         }
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e, true);
+      ParWork.propagateInterrupt(e, true);
       throw new AlreadyClosedException();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Exception polling queue of trigger {}", triggerName, e);
     }
     return null;
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/GenericDistributedQueue.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/GenericDistributedQueue.java
index 3f1b59d..8e5fe03 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/GenericDistributedQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/GenericDistributedQueue.java
@@ -35,9 +35,9 @@ import java.util.function.Predicate;
 import com.codahale.metrics.Timer;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
+import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.DistributedQueue;
 import org.apache.solr.client.solrj.cloud.autoscaling.AlreadyExistsException;
-import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.VersionedData;
 import org.apache.solr.cloud.OverseerTaskQueue;
 import org.apache.solr.cloud.Stats;
@@ -131,7 +131,7 @@ public class GenericDistributedQueue implements DistributedQueue {
     } catch (IOException | KeeperException e) {
       throw new SolrException(ErrorCode.SERVER_ERROR, e);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, e);
     }
 
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
index e52b030..2b1dee1 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
@@ -611,7 +611,7 @@ public class SimCloudManager implements SolrCloudManager {
     try {
       simCloudManagerPool.shutdownNow();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       // ignore
     }
     simCloudManagerPool = ParWork.getExecutorService( 10);
@@ -735,7 +735,7 @@ public class SimCloudManager implements SolrCloudManager {
       Future<SolrResponse> rsp = simCloudManagerPool.submit(() -> simHandleSolrRequest(req));
       return rsp.get(120, TimeUnit.SECONDS); // longer than this and something is seriously wrong
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new IOException(e);
     }
   }
@@ -904,7 +904,7 @@ public class SimCloudManager implements SolrCloudManager {
           try {
             clusterStateProvider.simCreateCollection(new ZkNodeProps(params.toNamedList().asMap(10)), results);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
           }
           break;
@@ -913,7 +913,7 @@ public class SimCloudManager implements SolrCloudManager {
             clusterStateProvider.simDeleteCollection(params.get(CommonParams.NAME),
                 params.get(CommonAdminParams.ASYNC), results);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
           }
           break;
@@ -924,7 +924,7 @@ public class SimCloudManager implements SolrCloudManager {
           try {
             clusterStateProvider.simAddReplica(new ZkNodeProps(params.toNamedList().asMap(10)), results);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
           }
           break;
@@ -932,7 +932,7 @@ public class SimCloudManager implements SolrCloudManager {
           try {
             clusterStateProvider.simMoveReplica(new ZkNodeProps(params.toNamedList().asMap(10)), results);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
           }
           break;
@@ -955,7 +955,7 @@ public class SimCloudManager implements SolrCloudManager {
           try {
             clusterStateProvider.simCreateShard(new ZkNodeProps(params.toNamedList().asMap(10)), results);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
           }
           break;
@@ -963,7 +963,7 @@ public class SimCloudManager implements SolrCloudManager {
           try {
             clusterStateProvider.simSplitShard(new ZkNodeProps(params.toNamedList().asMap(10)), results);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
           }
           break;
@@ -971,7 +971,7 @@ public class SimCloudManager implements SolrCloudManager {
           try {
             clusterStateProvider.simDeleteShard(new ZkNodeProps(params.toNamedList().asMap(10)), results);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
           }
           break;
@@ -1045,7 +1045,7 @@ public class SimCloudManager implements SolrCloudManager {
       try {
         return inner.call();
       } catch (InterruptedException ignored) {
-        ParWork.propegateInterrupt(ignored);
+        ParWork.propagateInterrupt(ignored);
         throw ignored;
       } catch (Throwable t) {
         // be forgiving of errors that occurred as a result of interruption, even if
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
index fbaa47f..28039cd 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
@@ -17,6 +17,34 @@
 
 package org.apache.solr.cloud.autoscaling.sim;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Random;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.stream.Collectors;
+
 import com.google.common.util.concurrent.AtomicDouble;
 import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
@@ -87,33 +115,6 @@ import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.TLOG_REPLICAS;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.MODIFYCOLLECTION;
 import static org.apache.solr.common.params.CommonParams.NAME;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Random;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.concurrent.locks.ReentrantLock;
-import java.util.stream.Collectors;
 
 /**
  * Simulated {@link ClusterStateProvider}.
@@ -519,7 +520,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     try {
       cloudManager.getDistribStateManager().makePath(path, Utils.toJSON(id), CreateMode.EPHEMERAL, false);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Exception saving overseer leader id", e);
     }
   }
@@ -1119,7 +1120,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
               return true;
             });
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new RuntimeException(e);
           }
         }
@@ -1143,7 +1144,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
           return true;
         });
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new RuntimeException(e);
       }
     });
@@ -1207,7 +1208,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
                   try {
                     cloudManager.getSimNodeStateProvider().simSetNodeValue(n, "cores", cores.intValue() - 1);
                   } catch (InterruptedException e) {
-                    ParWork.propegateInterrupt(e);
+                    ParWork.propagateInterrupt(e);
                     throw new RuntimeException("interrupted", e);
                   }
                 }
@@ -1219,7 +1220,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       collectionsStatesRef.remove(collection);
       results.add("success", "");
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Exception", e);
     } finally {
       lock.unlock();
@@ -1236,7 +1237,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
         try {
           cloudManager.getDistribStateManager().removeRecursively(ZkStateReader.getCollectionPath(name), true, true);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("Unable to delete collection state.json");
         }
       });
@@ -1389,7 +1390,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
         // this also takes care of leader election
         simAddReplica(addReplicasProps, results);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new RuntimeException(e);
       }
       
@@ -1643,7 +1644,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       collectionsStatesRef.get(collectionName).invalidate();
       results.add("success", "");
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       results.add("failure", e.toString());
     } finally {
       lock.unlock();
@@ -1669,7 +1670,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       CloudUtil.waitForState(cloudManager, CollectionAdminParams.SYSTEM_COLL, 120, TimeUnit.SECONDS,
           CloudUtil.clusterShape(1, Integer.parseInt(repFactor), false, true));
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new IOException(e);
     }
   }
@@ -1768,7 +1769,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
             }
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new IOException(e);
         } finally {
           lock.unlock();
@@ -1813,7 +1814,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
             simSetShardValue(collection, s.getName(), Variable.coreidxsize,
                 new AtomicDouble((Double)Type.CORE_IDX.convertVal(SimCloudManager.DEFAULT_IDX_SIZE_BYTES)), false, false);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new IOException(e);
           } finally {
             lock.unlock();
@@ -1942,7 +1943,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
               simSetShardValue(collection, sh, Variable.coreidxsize,
                   Type.CORE_IDX.convertVal(DEFAULT_DOC_SIZE_BYTES * count.get()), true, false);
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               throw new RuntimeException(e);
             }
           });
@@ -1980,7 +1981,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
             return freedisk;
           });
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new RuntimeException(e);
         }
       });
@@ -2000,7 +2001,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
             simSetShardValue(ri.getCollection(), ri.getShard(), "SEARCHER.searcher.maxDoc", numDocs, false, false);
             simSetShardValue(ri.getCollection(), ri.getShard(), "SEARCHER.searcher.deletedDocs", 0, false, false);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new RuntimeException(e);
           }
         });
@@ -2530,7 +2531,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
         lock.unlock();
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new IOException(e);
     }
   }
@@ -2543,7 +2544,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
         try {
           collectionStates.put(name, cached.getColl());
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new RuntimeException("error building collection " + name + " state", e);
         }
       });
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
index 52de831..4557bbc 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
@@ -35,10 +35,10 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.jute.Record;
+import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.AlreadyExistsException;
 import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
 import org.apache.solr.client.solrj.cloud.autoscaling.BadVersionException;
-import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.NotEmptyException;
 import org.apache.solr.client.solrj.cloud.autoscaling.VersionedData;
 import org.apache.solr.cloud.ActionThrottle;
@@ -46,8 +46,8 @@ import org.apache.solr.common.ParWork;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.AutoScalingParams;
 import org.apache.solr.common.util.ExecutorUtil;
-import org.apache.solr.common.util.Utils;
 import org.apache.solr.common.util.SolrNamedThreadFactory;
+import org.apache.solr.common.util.Utils;
 import org.apache.solr.util.IdUtils;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
@@ -610,7 +610,7 @@ public class SimDistribStateManager implements DistribStateManager {
             throw new Exception("Unknown Op: " + op);
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           res.add(new OpResult.ErrorResult(KeeperException.Code.APIERROR.intValue()));
         }
       }
@@ -644,7 +644,7 @@ public class SimDistribStateManager implements DistribStateManager {
     try {
       makePath(ZkStateReader.SOLR_AUTOSCALING_CONF_PATH);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       // ignore
     }
     setData(ZkStateReader.SOLR_AUTOSCALING_CONF_PATH, Utils.toJSON(cfg), -1);
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
index 29e9c37..27c18d3 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
@@ -253,7 +253,7 @@ public class SimNodeStateProvider implements NodeStateProvider, Closeable {
     try {
       stateManager.setData(ZkStateReader.ROLES, Utils.toJSON(roles), -1);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new RuntimeException("Unexpected exception saving roles " + roles, e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java
index ab915be..d041eb3 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java
@@ -17,6 +17,32 @@
 
 package org.apache.solr.cloud.autoscaling.sim;
 
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+import java.io.Reader;
+import java.lang.invoke.MethodHandles;
+import java.net.URLDecoder;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.http.client.HttpClient;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrResponse;
@@ -58,32 +84,6 @@ import org.apache.solr.util.TimeOut;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.PrintStream;
-import java.io.Reader;
-import java.lang.invoke.MethodHandles;
-import java.net.URLDecoder;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Properties;
-import java.util.Set;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-
 /**
  * This class represents an autoscaling scenario consisting of a series of autoscaling
  * operations on a simulated cluster.
@@ -250,7 +250,7 @@ public class SimScenario implements AutoCloseable {
         try {
           return SimAction.valueOf(str.toUpperCase(Locale.ROOT));
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           return null;
         }
       } else {
@@ -477,7 +477,7 @@ public class SimScenario implements AutoCloseable {
               AutoScalingConfig autoscalingConfig = scenario.cluster.getDistribStateManager().getAutoScalingConfig();
               return autoscalingConfig.getZkVersion() == scenario.cluster.getOverseerTriggerThread().getProcessedZnodeVersion();
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               throw new RuntimeException("FAILED", e);
             }
           });
@@ -581,7 +581,7 @@ public class SimScenario implements AutoCloseable {
         try {
           scenario.cluster.request(operation);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("Aborting - error executing suggestion {}", suggestion, e);
           break;
         }
@@ -863,7 +863,7 @@ public class SimScenario implements AutoCloseable {
         try {
           scenario.cluster.getSimClusterStateProvider().simSetShardValue(collection, shard, k, v, delta, divide);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new RuntimeException("Error setting shard value", e);
         }
       });
@@ -898,7 +898,7 @@ public class SimScenario implements AutoCloseable {
         try {
           return Condition.valueOf(p.toUpperCase(Locale.ROOT));
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           return null;
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/CollectionMutator.java b/solr/core/src/java/org/apache/solr/cloud/overseer/CollectionMutator.java
index de34534..0b7360c 100644
--- a/solr/core/src/java/org/apache/solr/cloud/overseer/CollectionMutator.java
+++ b/solr/core/src/java/org/apache/solr/cloud/overseer/CollectionMutator.java
@@ -110,7 +110,7 @@ public class CollectionMutator {
       } catch (KeeperException e) {
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
       }
 
diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/OverseerAction.java b/solr/core/src/java/org/apache/solr/cloud/overseer/OverseerAction.java
index acc4b3a..b9016ab 100644
--- a/solr/core/src/java/org/apache/solr/cloud/overseer/OverseerAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/overseer/OverseerAction.java
@@ -16,10 +16,10 @@
  */
 package org.apache.solr.cloud.overseer;
 
-import org.apache.solr.common.ParWork;
-
 import java.util.Locale;
 
+import org.apache.solr.common.ParWork;
+
 /**
  * Enum of actions supported by the overseer only.
  *
@@ -41,7 +41,7 @@ public enum OverseerAction {
       try {
         return OverseerAction.valueOf(p.toUpperCase(Locale.ROOT));
       } catch (Exception ex) {
-        ParWork.propegateInterrupt(ex);
+        ParWork.propagateInterrupt(ex);
       }
     }
     return null;
diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java b/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java
index 3f0e297..fe4d25b 100644
--- a/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java
+++ b/solr/core/src/java/org/apache/solr/cloud/overseer/ReplicaMutator.java
@@ -455,10 +455,10 @@ public class ReplicaMutator {
                   isLeaderSame = false;
                 }
               } catch (InterruptedException e) {
-                ParWork.propegateInterrupt(e);
+                ParWork.propagateInterrupt(e);
                 throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Interrupted", e);
               } catch (Exception e) {
-                ParWork.propegateInterrupt(e);
+                ParWork.propagateInterrupt(e);
                 log.warn("Error occurred while checking if parent shard node is still live with the same zk session id. {}"
                         , "We cannot switch shard states at this time.", e);
                 return collection; // we aren't going to make any changes right now
@@ -499,10 +499,10 @@ public class ReplicaMutator {
             try {
               SplitShardCmd.unlockForSplit(cloudManager, collection.getName(), parentSliceName);
             } catch (InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Interrupted", e);
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               log.warn("Failed to unlock shard after {} successful split: {} / {}"
                   , (isLeaderSame ? "" : "un"), collection.getName(), parentSliceName);
             }
diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/ZkStateWriter.java b/solr/core/src/java/org/apache/solr/cloud/overseer/ZkStateWriter.java
index a6361de..cc0f572 100644
--- a/solr/core/src/java/org/apache/solr/cloud/overseer/ZkStateWriter.java
+++ b/solr/core/src/java/org/apache/solr/cloud/overseer/ZkStateWriter.java
@@ -16,6 +16,14 @@
  */
 package org.apache.solr.cloud.overseer;
 
+import java.lang.invoke.MethodHandles;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
 import com.codahale.metrics.Timer;
 import org.apache.solr.cloud.Stats;
 import org.apache.solr.common.AlreadyClosedException;
@@ -23,34 +31,16 @@ import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.TimeOut;
 import org.apache.solr.common.util.TimeSource;
 import org.apache.solr.common.util.Utils;
-import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static java.util.Collections.singletonMap;
-import javax.print.Doc;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayDeque;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Deque;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
 
 
 public class ZkStateWriter {
@@ -182,17 +172,17 @@ public class ZkStateWriter {
           updatesToWrite.put(name, newCollection);
         }
       } catch (InterruptedException | AlreadyClosedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw e;
       } catch (KeeperException.SessionExpiredException e) {
         throw e;
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         if (e instanceof KeeperException.BadVersionException) {
           log.warn("Tried to update the cluster state using but we where rejected, currently at {}", c == null ? "null" : c.getZNodeVersion(), e);
           throw e;
         }
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Failed processing update=" + c + "\n" + prevState, e);
       }
     }
@@ -270,12 +260,12 @@ public class ZkStateWriter {
             }
 
         } catch (InterruptedException | AlreadyClosedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw e;
         } catch (KeeperException.SessionExpiredException e) {
           throw e;
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
 //          if (e instanceof KeeperException.BadVersionException) {
 //            // nocommit invalidState = true;
 //            //if (log.isDebugEnabled())
@@ -285,7 +275,7 @@ public class ZkStateWriter {
 //            prevState = reader.getClusterState();
 //            continue;
 //          }
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
               "Failed processing update=" + c, e);
         }
diff --git a/solr/core/src/java/org/apache/solr/cloud/rule/ReplicaAssigner.java b/solr/core/src/java/org/apache/solr/cloud/rule/ReplicaAssigner.java
index 3a71831..5babc3d 100644
--- a/solr/core/src/java/org/apache/solr/cloud/rule/ReplicaAssigner.java
+++ b/solr/core/src/java/org/apache/solr/cloud/rule/ReplicaAssigner.java
@@ -341,7 +341,7 @@ public class ReplicaAssigner {
       try {
         snitches.put(c, new SnitchInfoImpl(Collections.EMPTY_MAP, (Snitch) c.getConstructor().newInstance(), cloudManager));
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error instantiating Snitch " + c.getName());
       }
     }
@@ -369,7 +369,7 @@ public class ReplicaAssigner {
           try {
             info.snitch.getTags(node, info.myTags, context);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             context.exception = e;
           }
         }
@@ -439,7 +439,7 @@ public class ReplicaAssigner {
             (Snitch) Snitch.class.getClassLoader().loadClass(klas).getConstructor().newInstance() ;
         snitches.put(inst.getClass(), new SnitchInfoImpl(map, inst, cloudManager));
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
 
       }
diff --git a/solr/core/src/java/org/apache/solr/cloud/rule/Rule.java b/solr/core/src/java/org/apache/solr/cloud/rule/Rule.java
index a44fb18..7e98b39 100644
--- a/solr/core/src/java/org/apache/solr/cloud/rule/Rule.java
+++ b/solr/core/src/java/org/apache/solr/cloud/rule/Rule.java
@@ -321,7 +321,7 @@ public class Rule {
         }
 
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new IllegalArgumentException("Invalid condition : " + key + ":" + val, e);
       }
       this.val = expectedVal;
diff --git a/solr/core/src/java/org/apache/solr/core/BlobRepository.java b/solr/core/src/java/org/apache/solr/core/BlobRepository.java
index f38473c..4125ef7 100644
--- a/solr/core/src/java/org/apache/solr/core/BlobRepository.java
+++ b/solr/core/src/java/org/apache/solr/core/BlobRepository.java
@@ -140,7 +140,7 @@ public class BlobRepository {
           try {
             aBlob = blobCreator.call();
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Blob loading failed: " + e.getMessage(), e);
           }
         }
@@ -226,7 +226,7 @@ public class BlobRepository {
         throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "no such blob or version available: " + key);
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       if (e instanceof SolrException) {
         throw (SolrException) e;
       } else {
diff --git a/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java
index abcfbfa..afc860e 100644
--- a/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java
@@ -214,10 +214,10 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
             }
           }
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt("Interrupted closing directory", e);
+          ParWork.propagateInterrupt("Interrupted closing directory", e);
           return;
         } catch (Exception e) {
-          ParWork.propegateInterrupt("Error closing directory", e);
+          ParWork.propagateInterrupt("Error closing directory", e);
         }
       }
 
@@ -233,7 +233,7 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
             }
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt("Error closing directory", e);
+          ParWork.propagateInterrupt("Error closing directory", e);
         }
       }
 
@@ -242,7 +242,7 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
         try {
           removeDirectory(val);
         } catch (Exception e) {
-          ParWork.propegateInterrupt("Error removing directory", e);
+          ParWork.propagateInterrupt("Error removing directory", e);
         }
       }
 
@@ -286,7 +286,7 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
         } catch (Exception e) {
           log.error("closeCacheValue(CacheValue=" + cacheValue + ")", e);
 
-          ParWork.propegateInterrupt("Error executing preClose for directory", e);
+          ParWork.propagateInterrupt("Error executing preClose for directory", e);
         }
       }
     }
@@ -331,7 +331,7 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
         try {
           removeDirectory(val);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("closeCacheValue(CacheValue=" + cacheValue + ")", e);
 
           SolrException.log(log, "Error removing directory " + val.path + " before core close", e);
@@ -346,10 +346,10 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
         try {
           listener.postClose();
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("closeCacheValue(CacheValue=" + cacheValue + ")", e);
 
-          ParWork.propegateInterrupt("Error executing postClose for directory", e);
+          ParWork.propagateInterrupt("Error executing postClose for directory", e);
         }
       }
     }
@@ -378,7 +378,7 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
     } catch (Exception e) {
       log.error("close(CacheValue=" + val + ")", e);
 
-      ParWork.propegateInterrupt("Error closing directory", e);
+      ParWork.propagateInterrupt("Error closing directory", e);
     }
 
     if (log.isDebugEnabled()) {
diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSetProperties.java b/solr/core/src/java/org/apache/solr/core/ConfigSetProperties.java
index cd3971e..ab8cb5c 100644
--- a/solr/core/src/java/org/apache/solr/core/ConfigSetProperties.java
+++ b/solr/core/src/java/org/apache/solr/core/ConfigSetProperties.java
@@ -71,7 +71,7 @@ public class ConfigSetProperties {
       }
       return new NamedList();
     } catch (Exception ex) {
-      ParWork.propegateInterrupt(ex, true);
+      ParWork.propagateInterrupt(ex, true);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to load reader for ConfigSet properties: " + name, ex);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
index 9687dab..77e3e6b 100644
--- a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
+++ b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
@@ -16,6 +16,13 @@
  */
 package org.apache.solr.core;
 
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
 import com.github.benmanes.caffeine.cache.Cache;
 import com.github.benmanes.caffeine.cache.Caffeine;
 import org.apache.solr.cloud.CloudConfigSetService;
@@ -29,13 +36,6 @@ import org.apache.solr.schema.IndexSchemaFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-
 /**
  * Service class used by the CoreContainer to load ConfigSets for use in SolrCore
  * creation.
@@ -94,7 +94,7 @@ public abstract class ConfigSetService {
       IndexSchema schema = createIndexSchema(dcore, solrConfig);
       return new ConfigSet(configSetName(dcore), solrConfig, schema, properties, true);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
           "Could not load conf for core " + dcore.getName() +
               ": " + e.getMessage(), e);
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index 201f15c..fa6fe34 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -55,7 +55,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.store.Directory;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
-import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
 import org.apache.solr.client.solrj.impl.CloudHttp2SolrClient;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.client.solrj.impl.SolrHttpClientBuilder;
@@ -81,6 +80,7 @@ import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.common.util.ObjectCache;
 import org.apache.solr.common.util.ObjectReleaseTracker;
+import org.apache.solr.common.util.OrderedExecutor;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.DirectoryFactory.DirContext;
 import org.apache.solr.core.backup.repository.BackupRepository;
@@ -125,7 +125,6 @@ import org.apache.solr.security.PublicKeyHandler;
 import org.apache.solr.security.SecurityPluginHolder;
 import org.apache.solr.update.SolrCoreState;
 import org.apache.solr.update.UpdateShardHandler;
-import org.apache.solr.common.util.OrderedExecutor;
 import org.apache.solr.util.RefCounted;
 import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
@@ -438,7 +437,7 @@ public class CoreContainer implements Closeable {
       try {
         old.plugin.close();
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Exception while attempting to close old authorization plugin", e);
       }
     }
@@ -478,7 +477,7 @@ public class CoreContainer implements Closeable {
       try {
         old.plugin.close();
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Exception while attempting to close old auditlogger plugin", e);
       }
     }
@@ -535,7 +534,7 @@ public class CoreContainer implements Closeable {
     try {
       if (old != null) old.plugin.close();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Exception while attempting to close old authentication plugin", e);
     }
 
@@ -620,7 +619,7 @@ public class CoreContainer implements Closeable {
     try {
       cc.load();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       cc.shutdown();
       throw e;
     }
@@ -904,7 +903,7 @@ public class CoreContainer implements Closeable {
           try {
             future.get();
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
           } catch (ExecutionException e) {
             log.error("Error waiting for SolrCore to be loaded on startup", e.getCause());
           }
@@ -914,7 +913,7 @@ public class CoreContainer implements Closeable {
             try {
               future.get();
             } catch (InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
             } catch (ExecutionException e) {
               log.error("Error waiting for SolrCore to be loaded on startup", e.getCause());
             }
@@ -1041,7 +1040,7 @@ public class CoreContainer implements Closeable {
         cancelCoreRecoveries(false, true);
       } catch (Exception e) {
 
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Exception trying to cancel recoveries on shutdown", e);
       }
     }
@@ -1215,7 +1214,7 @@ public class CoreContainer implements Closeable {
         try {
           zkSys.registerInZk(core, skipRecovery).get();
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(ErrorCode.SERVER_ERROR, e);
         } catch (ExecutionException e) {
           throw new SolrException(ErrorCode.SERVER_ERROR, e);
@@ -1229,7 +1228,7 @@ public class CoreContainer implements Closeable {
         try {
           zkSys.registerInZk(core, skipRecovery).get();
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(ErrorCode.SERVER_ERROR, e);
         } catch (ExecutionException e) {
           throw new SolrException(ErrorCode.SERVER_ERROR, e);
@@ -1298,7 +1297,7 @@ public class CoreContainer implements Closeable {
 
       return core;
     } catch (Exception ex) {
-      ParWork.propegateInterrupt(ex);
+      ParWork.propagateInterrupt(ex);
       // First clean up any core descriptor, there should never be an existing core.properties file for any core that
       // failed to be created on-the-fly.
       coresLocator.delete(this, cd);
@@ -1306,7 +1305,7 @@ public class CoreContainer implements Closeable {
         try {
           getZkController().unregister(coreName, cd);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           SolrException.log(log, null, e);
         } catch (KeeperException e) {
           SolrException.log(log, null, e);
@@ -1400,7 +1399,7 @@ public class CoreContainer implements Closeable {
 
       return core;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Unable to create SolrCore", e);
       coreInitFailures.put(dcore.getName(), new CoreLoadFailure(dcore, e));
       if (e instanceof ZkController.NotInClusterStateException && !newCollection) {
@@ -1715,13 +1714,13 @@ public class CoreContainer implements Closeable {
         } catch (SolrCoreState.CoreIsClosedException e) {
           throw e;
         } catch (Exception e) {
-          ParWork.propegateInterrupt("Exception reloading SolrCore", e);
+          ParWork.propagateInterrupt("Exception reloading SolrCore", e);
           SolrException exp = new SolrException(ErrorCode.SERVER_ERROR, "Unable to reload core [" + cd.getName() + "]", e);
           try {
             coreInitFailures.put(cd.getName(), new CoreLoadFailure(cd, e));
 
           } catch (Exception e1) {
-            ParWork.propegateInterrupt(e1);
+            ParWork.propagateInterrupt(e1);
             exp.addSuppressed(e1);
           }
           throw exp;
@@ -1829,7 +1828,7 @@ public class CoreContainer implements Closeable {
         try {
           zkSys.getZkController().unregister(name, cd);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(ErrorCode.SERVER_ERROR, "Interrupted while unregistering core [" + name + "] from cloud state");
         } catch (KeeperException e) {
           throw new SolrException(ErrorCode.SERVER_ERROR, "Error unregistering core [" + name + "] from cloud state", e);
diff --git a/solr/core/src/java/org/apache/solr/core/Diagnostics.java b/solr/core/src/java/org/apache/solr/core/Diagnostics.java
index 0fd4c3c..6901969 100644
--- a/solr/core/src/java/org/apache/solr/core/Diagnostics.java
+++ b/solr/core/src/java/org/apache/solr/core/Diagnostics.java
@@ -15,14 +15,15 @@
  * limitations under the License.
  */
 package org.apache.solr.core;
-import org.apache.solr.common.ParWork;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import java.lang.invoke.MethodHandles;
 import java.lang.management.ManagementFactory;
 import java.lang.management.ThreadInfo;
 
+import org.apache.solr.common.ParWork;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 public class Diagnostics {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
@@ -34,7 +35,7 @@ public class Diagnostics {
     try {
       callable.call(data);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("TEST HOOK EXCEPTION", e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java
index bc414f8..919b3a8 100644
--- a/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/DirectoryFactory.java
@@ -18,7 +18,6 @@ package org.apache.solr.core;
 
 import java.io.Closeable;
 import java.io.File;
-import java.io.FileFilter;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
@@ -207,7 +206,7 @@ public abstract class DirectoryFactory implements NamedListInitializedPlugin,
     } catch (FileNotFoundException | NoSuchFileException e) {
 
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Exception deleting file", e);
     }
 
diff --git a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
index eefed09..b615139 100644
--- a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
@@ -375,7 +375,7 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
         throw new RuntimeException("Could not remove directory");
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Could not remove directory", e);
       throw new SolrException(ErrorCode.SERVER_ERROR,
           "Could not remove directory", e);
diff --git a/solr/core/src/java/org/apache/solr/core/MemClassLoader.java b/solr/core/src/java/org/apache/solr/core/MemClassLoader.java
index 2370e6d..92973e7 100644
--- a/solr/core/src/java/org/apache/solr/core/MemClassLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/MemClassLoader.java
@@ -64,7 +64,7 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
           lib.loadJar();
           lib.verify();
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("Error loading runtime library", e);
         }
         count++;
@@ -81,7 +81,7 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
         lib.loadJar();
         lib.verify();
       } catch (Exception exception) {
-        ParWork.propegateInterrupt(exception);
+        ParWork.propagateInterrupt(exception);
         errors.add(exception.getMessage());
         if (exception instanceof SolrException) throw (SolrException) exception;
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Atleast one runtimeLib could not be loaded", exception);
@@ -97,7 +97,7 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
     try {
       return parentLoader.findClass(name, Object.class);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       return loadFromRuntimeLibs(name);
     }
   }
@@ -112,7 +112,7 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
     try {
       buf = getByteBuffer(name, jarName);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new ClassNotFoundException("class could not be loaded " + name + (errors.isEmpty()? "": "Some dynamic libraries could not be loaded: "+ StrUtils.join(errors, '|')), e);
     }
     if (buf == null) throw new ClassNotFoundException("Class not found :" + name);
@@ -148,7 +148,7 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
           break;
         }
       } catch (Exception exp) {
-        ParWork.propegateInterrupt(exp);
+        ParWork.propagateInterrupt(exp);
         throw new ClassNotFoundException("Unable to load class :" + name, exp);
       }
     }
@@ -162,7 +162,7 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
       try {
         lib.close();
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Error closing lib {}", lib.getName(), e);
       }
     }
@@ -175,7 +175,7 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
       ByteBuffer buf = getByteBuffer(resource, jarName);
       if (buf == null) throw new IOException("Resource could not be found " + resource);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new IOException("Resource could not be found " + resource, e);
     }
     return null;
@@ -187,7 +187,7 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
     try {
       return findClass(cname).asSubclass(expectedType);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       if (e instanceof SolrException) {
         throw (SolrException) e;
       } else {
@@ -202,10 +202,10 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
     try {
       return findClass(cname, expectedType).getConstructor().newInstance();
     } catch (SolrException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw e;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "error instantiating class :" + cname, e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/core/PluginBag.java b/solr/core/src/java/org/apache/solr/core/PluginBag.java
index 5168604..5cb2c540 100644
--- a/solr/core/src/java/org/apache/solr/core/PluginBag.java
+++ b/solr/core/src/java/org/apache/solr/core/PluginBag.java
@@ -23,7 +23,6 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -376,7 +375,7 @@ public class PluginBag<T> implements AutoCloseable {
     try {
       if (inst != null && inst instanceof AutoCloseable) ((AutoCloseable) inst).close();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Error closing {}", inst , e);
     }
   }
@@ -420,7 +419,7 @@ public class PluginBag<T> implements AutoCloseable {
           try {
             ((AutoCloseable) myInst).close();
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             log.error("Error closing {}", inst , e);
           }
         }
@@ -644,7 +643,7 @@ public class PluginBag<T> implements AutoCloseable {
         try {
           rtl.init(lib);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("error loading runtime library", e);
         }
         l.add(rtl);
@@ -679,7 +678,7 @@ public class PluginBag<T> implements AutoCloseable {
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No key matched signature for jar : " + name + " version: " + version);
         log.info("Jar {} signed with {} successfully verified", name, matchedKey);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         if (e instanceof SolrException) throw e;
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error verifying key ", e);
       }
diff --git a/solr/core/src/java/org/apache/solr/core/QuerySenderListener.java b/solr/core/src/java/org/apache/solr/core/QuerySenderListener.java
index 4df49e3..a408b0e 100644
--- a/solr/core/src/java/org/apache/solr/core/QuerySenderListener.java
+++ b/solr/core/src/java/org/apache/solr/core/QuerySenderListener.java
@@ -93,7 +93,7 @@ public class QuerySenderListener extends AbstractSolrEventListener {
         }
 
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         // do nothing... we want to continue with the other requests.
         // the failure should have already been logged.
       } finally {
diff --git a/solr/core/src/java/org/apache/solr/core/RequestParams.java b/solr/core/src/java/org/apache/solr/core/RequestParams.java
index a16bee0..2c14b26 100644
--- a/solr/core/src/java/org/apache/solr/core/RequestParams.java
+++ b/solr/core/src/java/org/apache/solr/core/RequestParams.java
@@ -198,7 +198,7 @@ public class RequestParams implements MapSerializable {
         Map m = (Map) fromJSON (in);
         return new Object[]{m, version};
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing conf resource " + name, e);
       }
 
diff --git a/solr/core/src/java/org/apache/solr/core/SolrConfig.java b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
index 1ad4848..cf94c72 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
@@ -48,7 +48,6 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.commons.collections.map.UnmodifiableOrderedMap;
 import org.apache.commons.io.FileUtils;
 import org.apache.lucene.index.IndexDeletionPolicy;
 import org.apache.lucene.search.IndexSearcher;
@@ -200,7 +199,7 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
     try {
       return new SolrConfig(loader, name, isConfigsetTrusted, substitutableProperties);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       String resource;
       if (loader instanceof ZkSolrResourceLoader) {
         resource = name;
@@ -469,7 +468,7 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
       Map m = (Map) fromJSON(in);
       return new ConfigOverlay(m, version);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Error reading config overlay", e);
     } finally {
       IOUtils.closeQuietly(isr);
@@ -633,7 +632,7 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
         try {
           return valueOf(s.toUpperCase(Locale.ROOT));
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Unrecognized value for lastModFrom: {}", s, e);
           return BOGUS;
         }
@@ -667,7 +666,7 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
               ? Long.valueOf(ttlStr)
               : null;
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Ignoring exception while attempting to extract max-age from cacheControl config: {}"
               , cacheControlHeader, e);
         }
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index e24c19e..24dc4b6 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -79,7 +79,6 @@ import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.solr.client.solrj.impl.BinaryResponseParser;
 import org.apache.solr.cloud.CloudDescriptor;
 import org.apache.solr.cloud.RecoveryStrategy;
-import org.apache.solr.cloud.ZkController;
 import org.apache.solr.cloud.ZkSolrResourceLoader;
 import org.apache.solr.common.AlreadyClosedException;
 import org.apache.solr.common.ParWork;
@@ -107,7 +106,6 @@ import org.apache.solr.core.snapshots.SolrSnapshotMetaDataManager.SnapshotMetaDa
 import org.apache.solr.handler.IndexFetcher;
 import org.apache.solr.handler.ReplicationHandler;
 import org.apache.solr.handler.RequestHandlerBase;
-import org.apache.solr.handler.SolrConfigHandler;
 import org.apache.solr.handler.component.HighlightComponent;
 import org.apache.solr.handler.component.SearchComponent;
 import org.apache.solr.logging.MDCLoggingContext;
@@ -404,7 +402,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       lastNewIndexDir = result;
       return result;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       // See SOLR-11687. It is inadvisable to assume we can do the right thing for any but a small
       // number of exceptions that were caught and swallowed in getIndexProperty.
       throw new SolrException(ErrorCode.SERVER_ERROR, "Error in getNewIndexDir, exception: ", e);
@@ -413,7 +411,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
         try {
           getDirectoryFactory().release(dir);
         } catch (Exception e) {
-          ParWork.propegateInterrupt( "Error releasing directory", e);
+          ParWork.propagateInterrupt( "Error releasing directory", e);
           throw new SolrException(ErrorCode.SERVER_ERROR, "Error releasing directory: ", e);
         }
       }
@@ -551,7 +549,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
               getSolrConfig().indexConfig.lockType);
       return new SolrSnapshotMetaDataManager(this, snapshotDir);
     } catch (Throwable e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
 
       try {
         if (snapshotDir != null) {
@@ -822,7 +820,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       try (SolrIndexWriter writer = SolrIndexWriter.buildIndexWriter(this, "SolrCore.initIndex", indexDir, getDirectoryFactory(),
               true, getLatestSchema(), solrConfig.indexConfig, solrDelPolicy, codec)) {
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(ErrorCode.SERVER_ERROR, e);
       }
     }
@@ -873,7 +871,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
     } catch (SolrException e) {
       throw e;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
 
       // The JVM likes to wrap our helpful SolrExceptions in things like
       // "InvocationTargetException" that have no useful getMessage
@@ -904,7 +902,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
     } catch (SolrException e) {
       throw e;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       // The JVM likes to wrap our helpful SolrExceptions in things like
       // "InvocationTargetException" that have no useful getMessage
       if (null != e.getCause() && e.getCause() instanceof SolrException) {
@@ -1107,14 +1105,14 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       // release the latch, otherwise we block trying to do the close. This
       // should be fine, since counting down on a latch of 0 is still fine
       searcherReadyLatch.countDown();
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       try {
         // close down the searcher and any other resources, if it exists, as this
         // is not recoverable
         //onDeckSearchers.set(0);
         close();
       } catch (Throwable t) {
-        ParWork.propegateInterrupt("Error while closing", t);
+        ParWork.propagateInterrupt("Error while closing", t);
       }
 
       String msg;
@@ -1373,7 +1371,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       try {
         p.load(new InputStreamReader(is, StandardCharsets.UTF_8));
       } catch (Exception e) {
-        ParWork.propegateInterrupt("Unable to load " + IndexFetcher.INDEX_PROPERTIES, e);
+        ParWork.propagateInterrupt("Unable to load " + IndexFetcher.INDEX_PROPERTIES, e);
       } finally {
         IOUtils.closeQuietly(is);
       }
@@ -1391,7 +1389,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       p.store(os, IndexFetcher.INDEX_PROPERTIES);
       dir.sync(Collections.singleton(tmpFileName));
     } catch (Exception e) {
-      ParWork.propegateInterrupt("Unable to write " + IndexFetcher.INDEX_PROPERTIES, e);
+      ParWork.propagateInterrupt("Unable to write " + IndexFetcher.INDEX_PROPERTIES, e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to write " + IndexFetcher.INDEX_PROPERTIES, e);
     } finally {
       IOUtils.closeQuietly(os);
@@ -1419,7 +1417,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
         try {
           closeAndWait.wait(500);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
       }
     }
@@ -2234,7 +2232,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       return newSearcher;
 
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Error opening new searcher", e);
     } finally {
       openSearcherLock.unlock();
@@ -2319,7 +2317,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
           try {
             searcherLock.wait();
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
           }
         }
 
@@ -2361,7 +2359,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
           try {
             searcherLock.wait();
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
           }
           continue;  // go back to the top of the loop and retry
         } else if (onDeckSearchers.get() > 1) {
@@ -2428,7 +2426,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
             try {
               newSearcher.warm(currSearcher);
             } catch (Throwable e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
             } finally {
               warmupContext.close();
             }
@@ -2478,7 +2476,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
                 registerSearcher(newSearchHolder);
                 registered.set(true);
               } catch (Throwable e) {
-                ParWork.propegateInterrupt(e);
+                ParWork.propagateInterrupt(e);
               } finally {
                 // we are all done with the old searcher we used
                 // for warming...
@@ -2498,7 +2496,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       return returnSearcher ? newSearchHolder : null;
 
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       if (e instanceof RuntimeException) throw (RuntimeException) e;
       throw new SolrException(ErrorCode.SERVER_ERROR, e);
     } finally {
@@ -2577,7 +2575,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
         } catch (Exception e) {
           // do not allow decref() operations to fail since they are typically called in finally blocks
           // and throwing another exception would be very unexpected.
-          ParWork.propegateInterrupt("Error opening new searcher", e);
+          ParWork.propagateInterrupt("Error opening new searcher", e);
         }
       }
     };
@@ -2634,10 +2632,10 @@ public final class SolrCore implements SolrInfoBean, Closeable {
         }
         success = true;
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         newSearcherHolder.decref();
         // an exception in register() shouldn't be fatal.
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       } finally {
         // wake up anyone waiting for a searcher
         // even in the face of errors.
@@ -2832,7 +2830,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       m.put("xlsx",
           (QueryResponseWriter) Class.forName("org.apache.solr.handler.extraction.XLSXResponseWriter").getConstructor().newInstance());
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e, true);
+      ParWork.propagateInterrupt(e, true);
       //don't worry; solrcell contrib not in class path
     }
   }
@@ -2925,7 +2923,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
         result.put(e.getKey(), (T) o);
       } catch (Exception exp) {
         //should never happen
-        ParWork.propegateInterrupt(exp);
+        ParWork.propagateInterrupt(exp);
         throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to instantiate class", exp);
       }
     }
@@ -3079,7 +3077,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       try {
         directoryFactory.remove(getIndexDir());
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         SolrException.log(log, "Failed to flag index dir for removal for core:" + name + " dir:" + getIndexDir());
       }
     }
@@ -3087,7 +3085,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       try {
         directoryFactory.remove(getDataDir(), true);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         SolrException.log(log, "Failed to flag data dir for removal for core:" + name + " dir:" + getDataDir());
       }
     }
@@ -3193,7 +3191,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
               try {
                 listener.run();
               } catch (Exception e) {
-                ParWork.propegateInterrupt("Error in listener ", e);
+                ParWork.propagateInterrupt("Error in listener ", e);
               }
             });
 
@@ -3232,7 +3230,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
     } catch (KeeperException e) {
       log.error("error refreshing solrconfig ", e);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
     }
     return false;
   }
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCores.java b/solr/core/src/java/org/apache/solr/core/SolrCores.java
index c24d985..80bebca 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCores.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCores.java
@@ -16,15 +16,6 @@
  */
 package org.apache.solr.core;
 
-import com.google.common.collect.Lists;
-import org.apache.http.annotation.Experimental;
-import org.apache.solr.common.AlreadyClosedException;
-import org.apache.solr.common.ParWork;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.logging.MDCLoggingContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.Closeable;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
@@ -36,6 +27,15 @@ import java.util.TreeSet;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
 
+import com.google.common.collect.Lists;
+import org.apache.http.annotation.Experimental;
+import org.apache.solr.common.AlreadyClosedException;
+import org.apache.solr.common.ParWork;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.logging.MDCLoggingContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 
 class SolrCores implements Closeable {
   private final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -129,7 +129,7 @@ class SolrCores implements Closeable {
             core.closeAndWait();
           } catch (Throwable e) {
             log.error("Error closing SolrCore", e);
-            ParWork.propegateInterrupt("Error shutting down core", e);
+            ParWork.propagateInterrupt("Error shutting down core", e);
           } finally {
             MDCLoggingContext.clear();
           }
@@ -386,7 +386,7 @@ class SolrCores implements Closeable {
           try {
             loadingSignal.wait(1000);
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             return;
           }
         }
@@ -407,7 +407,7 @@ class SolrCores implements Closeable {
           try {
             loadingSignal.wait(1000);
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             return;
           }
         }
diff --git a/solr/core/src/java/org/apache/solr/core/SolrDeletionPolicy.java b/solr/core/src/java/org/apache/solr/core/SolrDeletionPolicy.java
index c9c807d..7110841 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrDeletionPolicy.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrDeletionPolicy.java
@@ -178,7 +178,7 @@ public class SolrDeletionPolicy extends IndexDeletionPolicy implements NamedList
             }
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Exception while checking commit point's age for deletion", e);
         }
 
diff --git a/solr/core/src/java/org/apache/solr/core/SolrPaths.java b/solr/core/src/java/org/apache/solr/core/SolrPaths.java
index 884ff07..1743c00 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrPaths.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrPaths.java
@@ -105,7 +105,7 @@ public final class SolrPaths {
           log.warn("Unable to create [{}] directory in SOLR_HOME [{}].  Features requiring this directory may fail.", USER_FILES_DIRECTORY, solrHome);
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.warn("Unable to create [{}] directory in SOLR_HOME [{}].  Features requiring this directory may fail.",
             USER_FILES_DIRECTORY, solrHome, e);
       }
diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
index a5057c9..9cb8b52 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
@@ -16,35 +16,6 @@
  */
 package org.apache.solr.core;
 
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.lucene.analysis.WordlistLoader;
-import org.apache.lucene.analysis.util.CharFilterFactory;
-import org.apache.lucene.analysis.util.ResourceLoader;
-import org.apache.lucene.analysis.util.ResourceLoaderAware;
-import org.apache.lucene.analysis.util.TokenFilterFactory;
-import org.apache.lucene.analysis.util.TokenizerFactory;
-import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.codecs.DocValuesFormat;
-import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.util.IOUtils;
-import org.apache.solr.common.ParWork;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.XMLErrorLogger;
-import org.apache.solr.handler.component.SearchComponent;
-import org.apache.solr.handler.component.ShardHandlerFactory;
-import org.apache.solr.request.SolrRequestHandler;
-import org.apache.solr.response.QueryResponseWriter;
-import org.apache.solr.rest.RestManager;
-import org.apache.solr.schema.FieldType;
-import org.apache.solr.schema.ManagedIndexSchemaFactory;
-import org.apache.solr.schema.SimilarityFactory;
-import org.apache.solr.search.QParserPlugin;
-import org.apache.solr.update.processor.UpdateRequestProcessorFactory;
-import org.apache.solr.util.SystemIdResolver;
-import org.apache.solr.util.plugin.SolrCoreAware;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.Closeable;
 import java.io.File;
 import java.io.IOException;
@@ -74,6 +45,35 @@ import java.util.concurrent.ConcurrentHashMap;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.lucene.analysis.WordlistLoader;
+import org.apache.lucene.analysis.util.CharFilterFactory;
+import org.apache.lucene.analysis.util.ResourceLoader;
+import org.apache.lucene.analysis.util.ResourceLoaderAware;
+import org.apache.lucene.analysis.util.TokenFilterFactory;
+import org.apache.lucene.analysis.util.TokenizerFactory;
+import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.codecs.DocValuesFormat;
+import org.apache.lucene.codecs.PostingsFormat;
+import org.apache.lucene.util.IOUtils;
+import org.apache.solr.common.ParWork;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.util.XMLErrorLogger;
+import org.apache.solr.handler.component.SearchComponent;
+import org.apache.solr.handler.component.ShardHandlerFactory;
+import org.apache.solr.request.SolrRequestHandler;
+import org.apache.solr.response.QueryResponseWriter;
+import org.apache.solr.rest.RestManager;
+import org.apache.solr.schema.FieldType;
+import org.apache.solr.schema.ManagedIndexSchemaFactory;
+import org.apache.solr.schema.SimilarityFactory;
+import org.apache.solr.search.QParserPlugin;
+import org.apache.solr.update.processor.UpdateRequestProcessorFactory;
+import org.apache.solr.util.SystemIdResolver;
+import org.apache.solr.util.plugin.SolrCoreAware;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
  * @since solr 1.3
  */
@@ -731,7 +731,7 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
       throw err;
 
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
               "Error instantiating class: '" + clazz.getName() + "'", e);
     }
@@ -791,7 +791,7 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
             try {
               aware.inform(core);
             } catch (Exception e) {
-              ParWork.propegateInterrupt("Exception informing for SolrCore", e);
+              ParWork.propagateInterrupt("Exception informing for SolrCore", e);
               throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Exception informing for SolrCore", e);
             }
             waitingForCore.remove(aware);
@@ -815,7 +815,7 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
             try {
               r.inform(loader);
             } catch (Exception e) {
-              ParWork.propegateInterrupt("Exception informing for ResourceLoader", e);
+              ParWork.propagateInterrupt("Exception informing for ResourceLoader", e);
               throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Exception informing for ResourceLoader", e);
             }
             waitingForResources.remove(r);
@@ -843,7 +843,7 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
                 infoRegistry.put(imb.getName(), imb);
                 infoMBeans.remove(imb);
               } catch (Exception e) {
-                ParWork.propegateInterrupt(e);
+                ParWork.propagateInterrupt(e);
                 log.warn("could not register MBean '" + imb.getName() + "'.", e);
               }
           });
diff --git a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
index fff409e..28de596 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
@@ -16,6 +16,25 @@
  */
 package org.apache.solr.core;
 
+import javax.management.MBeanServer;
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpression;
+import javax.xml.xpath.XPathExpressionException;
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.lang.invoke.MethodHandles;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
 import com.google.common.base.Strings;
 import org.apache.commons.io.IOUtils;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
@@ -24,7 +43,6 @@ import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.logging.LogWatcherConfig;
 import org.apache.solr.metrics.reporters.SolrJmxReporter;
-import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.update.UpdateShardHandlerConfig;
 import org.apache.solr.util.DOMUtil;
 import org.apache.solr.util.JmxUtil;
@@ -36,24 +54,6 @@ import org.w3c.dom.NodeList;
 import org.xml.sax.InputSource;
 
 import static org.apache.solr.common.params.CommonParams.NAME;
-import javax.management.MBeanServer;
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathExpression;
-import javax.xml.xpath.XPathExpressionException;
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
-import java.lang.invoke.MethodHandles;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
 
 
 /**
@@ -228,7 +228,7 @@ public class SolrXmlConfig {
     } catch (SolrException exc) {
       throw exc;
     } catch (Exception exc) {
-      ParWork.propegateInterrupt(exc);
+      ParWork.propagateInterrupt(exc);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
           "Could not load SOLR configuration", exc);
     }
@@ -269,7 +269,7 @@ public class SolrXmlConfig {
       log.error("Exception reading config", exc);
       throw exc;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactory.java b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactory.java
index d973465..0056790 100644
--- a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactory.java
@@ -61,7 +61,7 @@ public abstract class TransientSolrCoreCacheFactory {
       tccf.setCoreContainer(coreContainer);
       return tccf;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       // Many things could cause this, bad solrconfig, mis-typed class name, whatever. However, this should not
       // keep the enclosing coreContainer from instantiating, so log an error and continue.
       log.error(String.format(Locale.ROOT, "Error instantiating TransientSolrCoreCacheFactory class [%s]: %s",
diff --git a/solr/core/src/java/org/apache/solr/core/XmlConfigFile.java b/solr/core/src/java/org/apache/solr/core/XmlConfigFile.java
index b97bbe1..41527a8 100644
--- a/solr/core/src/java/org/apache/solr/core/XmlConfigFile.java
+++ b/solr/core/src/java/org/apache/solr/core/XmlConfigFile.java
@@ -16,6 +16,25 @@
  */
 package org.apache.solr.core;
 
+import javax.xml.namespace.QName;
+import javax.xml.transform.sax.SAXSource;
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpression;
+import javax.xml.xpath.XPathExpressionException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.invoke.MethodHandles;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
 import net.sf.saxon.Configuration;
 import net.sf.saxon.dom.DocumentOverNodeInfo;
 import net.sf.saxon.event.Sender;
@@ -40,25 +59,6 @@ import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 import org.xml.sax.InputSource;
 
-import javax.xml.namespace.QName;
-import javax.xml.transform.sax.SAXSource;
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathExpression;
-import javax.xml.xpath.XPathExpressionException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.lang.invoke.MethodHandles;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
 /**
  * Wrapper around an XML DOM object to provide convenient accessors to it.  Intended for XML config files.
  */
@@ -313,7 +313,7 @@ public class XmlConfigFile { // formerly simply "Config"
       } catch (SolrException e) {
         throw (e);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         SolrException.log(log, "Error in xpath", e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
             "Error in xpath:" + path + " for " + name, e);
@@ -347,7 +347,7 @@ public class XmlConfigFile { // formerly simply "Config"
       } catch (SolrException e) {
         throw (e);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         SolrException.log(log, "Error in xpath", e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
             "Error in xpath:" + xstr + " for " + name, e);
diff --git a/solr/core/src/java/org/apache/solr/core/ZkContainer.java b/solr/core/src/java/org/apache/solr/core/ZkContainer.java
index d5f3150..93e1a76 100644
--- a/solr/core/src/java/org/apache/solr/core/ZkContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/ZkContainer.java
@@ -156,7 +156,7 @@ public class ZkContainer implements Closeable {
 
         if (log.isDebugEnabled()) log.debug("done zkController create");
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
             "", e);
       } catch (TimeoutException e) {
@@ -215,12 +215,12 @@ public class ZkContainer implements Closeable {
               zkController.register(core.getName(), cd, skipRecovery);
             }
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             SolrException exp = new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
             try {
               zkController.publish(cd, Replica.State.DOWN);
             } catch (Exception e1) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               exp.addSuppressed(e1);
             }
             throw exp;
diff --git a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotMetaDataManager.java b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotMetaDataManager.java
index 48002b3..0b6f776 100644
--- a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotMetaDataManager.java
+++ b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotMetaDataManager.java
@@ -193,7 +193,7 @@ public class SolrSnapshotMetaDataManager {
         try {
           release(name);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           // Suppress so we keep throwing original exception
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotsTool.java b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotsTool.java
index ab02081..5463491 100644
--- a/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotsTool.java
+++ b/solr/core/src/java/org/apache/solr/core/snapshots/SolrSnapshotsTool.java
@@ -40,6 +40,7 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.Optional;
 
+import com.google.common.base.Preconditions;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.HelpFormatter;
@@ -63,8 +64,6 @@ import org.apache.solr.util.CLIO;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
-
 /**
  * This class provides utility functions required for Solr snapshots functionality.
  */
@@ -112,7 +111,7 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
       CLIO.out("Successfully created snapshot with name " + snapshotName + " for collection " + collectionName);
 
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Failed to create a snapshot with name {} for collection {}", snapshotName, collectionName, e);
       CLIO.out("Failed to create a snapshot with name " + snapshotName + " for collection " + collectionName
           +" due to following error : "+e.getLocalizedMessage());
@@ -128,7 +127,7 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
       CLIO.out("Successfully deleted snapshot with name " + snapshotName + " for collection " + collectionName);
 
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Failed to delete a snapshot with name {} for collection {}", snapshotName, collectionName, e);
       CLIO.out("Failed to delete a snapshot with name " + snapshotName + " for collection " + collectionName
           +" due to following error : "+e.getLocalizedMessage());
@@ -149,7 +148,7 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
       }
 
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Failed to list snapshots for collection {}", collectionName, e);
       CLIO.out("Failed to list snapshots for collection " + collectionName
           +" due to following error : "+e.getLocalizedMessage());
@@ -182,7 +181,7 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
         }
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Failed to fetch snapshot details", e);
       CLIO.out("Failed to fetch snapshot details due to following error : " + e.getLocalizedMessage());
     }
@@ -280,7 +279,7 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
       buildCopyListings(collectionName, snapshotName, localFsPath, pathPrefix);
       CLIO.out("Successfully prepared copylisting for the snapshot export.");
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Failed to prepare a copylisting for snapshot with name {} for collection {}", snapshotName, collectionName, e);
 
       CLIO.out("Failed to prepare a copylisting for snapshot with name " + snapshotName + " for collection "
@@ -292,7 +291,7 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
       backupCollectionMetaData(collectionName, snapshotName, destPath);
       CLIO.out("Successfully backed up collection meta-data");
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Failed to backup collection meta-data for collection {}", collectionName, e);
       CLIO.out("Failed to backup collection meta-data for collection " + collectionName
           + " due to following error : " + e.getLocalizedMessage());
@@ -313,7 +312,7 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
       // if asyncId is null, processAsync will block and throw an Exception with any error
       backup.processAsync(asyncReqId.orElse(null), solrClient);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Failed to backup collection meta-data for collection {}", collectionName, e);
       CLIO.out("Failed to backup collection meta-data for collection " + collectionName
           + " due to following error : " + e.getLocalizedMessage());
diff --git a/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java b/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java
index 038e8be..a854aeb 100644
--- a/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java
+++ b/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java
@@ -170,7 +170,7 @@ public class DistribPackageStore implements PackageStore {
             return true;
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(SERVER_ERROR, "unable to parse metadata json file");
         }
       } else {
@@ -245,7 +245,7 @@ public class DistribPackageStore implements PackageStore {
             if (success) return true;
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           //it's OK for some nodes to fail
         }
       }
@@ -283,7 +283,7 @@ public class DistribPackageStore implements PackageStore {
           try {
             return readMetaData();
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new RuntimeException(e);
           }
         }
@@ -351,7 +351,7 @@ public class DistribPackageStore implements PackageStore {
       coreContainer.getZkController().getZkClient().create(ZK_PACKAGESTORE + info.path, info.getDetails().getMetaData().sha512.getBytes(UTF_8),
           CreateMode.PERSISTENT, true);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SERVER_ERROR, "Unable to create an entry in ZK", e);
     }
     tmpFiles.put(info.path, info);
@@ -377,7 +377,7 @@ public class DistribPackageStore implements PackageStore {
             try {
               Thread.sleep(2 * 1000);
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
             }
           }
           // trying to avoid the thundering herd problem when there are a very large number of nodes
@@ -389,7 +389,7 @@ public class DistribPackageStore implements PackageStore {
           //fire and forget
           Http2SolrClient.GET(url, coreContainer.getUpdateShardHandler().getTheSharedHttpClient());
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.info("Node: {} failed to respond for file fetch notification",  node, e);
           //ignore the exception
           // some nodes may be down or not responding
@@ -505,7 +505,7 @@ public class DistribPackageStore implements PackageStore {
         }
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Could not refresh files in {}", path, e);
     }
   }
@@ -540,7 +540,7 @@ public class DistribPackageStore implements PackageStore {
           log.warn("Unable to create [{}] directory in SOLR_HOME [{}].  Features requiring this directory may fail.", packageStoreDir, solrHome);
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.warn("Unable to create [{}] directory in SOLR_HOME [{}].  Features requiring this directory may fail.", packageStoreDir, solrHome, e);
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java b/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java
index 2bcb078..539fd19 100644
--- a/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java
+++ b/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java
@@ -116,14 +116,14 @@ public class PackageStoreAPI {
               packageStore.refresh(KEYS_DIR);
               validate(entry.meta.signatures, entry, false);
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               log.error("Error validating package artifact", e);
               errs.accept(e.getMessage());
             }
           }
         }, false);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Error reading file ", e);
         errs.accept("Error reading file " + path + " " + e.getMessage());
       }
@@ -170,7 +170,7 @@ public class PackageStoreAPI {
           throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
         }
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       } catch (KeeperException.NodeExistsException e) {
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "A write is already in process , try later");
       } catch (KeeperException e) {
@@ -179,7 +179,7 @@ public class PackageStoreAPI {
         try {
           coreContainer.getZkController().getZkClient().delete(TMP_ZK_NODE, -1);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("Unexpected error  ", e);
         }
       }
@@ -206,7 +206,7 @@ public class PackageStoreAPI {
       try {
         cryptoKeys = new CryptoKeys(keys);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
             "Error parsing public keys in Package store");
       }
@@ -260,7 +260,7 @@ public class PackageStoreAPI {
           try {
             packageStore.fetch(pathCopy, getFrom);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             log.error("Failed to download file: {}", pathCopy, e);
           }
           log.info("downloaded file: {}", pathCopy);
@@ -387,7 +387,7 @@ public class PackageStoreAPI {
     try {
       cryptoKeys = new CryptoKeys(keys);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
           "Error parsing public keys in ZooKeeper");
     }
diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrParams.java b/solr/core/src/java/org/apache/solr/handler/CdcrParams.java
index 32938d2..c28bfd8 100644
--- a/solr/core/src/java/org/apache/solr/handler/CdcrParams.java
+++ b/solr/core/src/java/org/apache/solr/handler/CdcrParams.java
@@ -16,11 +16,11 @@
  */
 package org.apache.solr.handler;
 
-import org.apache.solr.common.ParWork;
-
 import java.nio.charset.Charset;
 import java.util.Locale;
 
+import org.apache.solr.common.ParWork;
+
 public class CdcrParams {
 
   /**
@@ -182,7 +182,7 @@ public class CdcrParams {
         try {
           return CdcrAction.valueOf(p.toUpperCase(Locale.ROOT));
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
       }
       return null;
@@ -206,7 +206,7 @@ public class CdcrParams {
         try {
           return ProcessState.valueOf(new String(state, Charset.forName("UTF-8")).toUpperCase(Locale.ROOT));
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
       }
       return null;
@@ -238,7 +238,7 @@ public class CdcrParams {
         try {
           return BufferState.valueOf(new String(state, Charset.forName("UTF-8")).toUpperCase(Locale.ROOT));
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
       }
       return null;
diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrReplicator.java b/solr/core/src/java/org/apache/solr/handler/CdcrReplicator.java
index 154f195..87b0f94 100644
--- a/solr/core/src/java/org/apache/solr/handler/CdcrReplicator.java
+++ b/solr/core/src/java/org/apache/solr/handler/CdcrReplicator.java
@@ -129,7 +129,7 @@ public class CdcrReplicator implements Runnable {
         log.info("Forwarded {} updates to target {}", counter, state.getTargetCollection());
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       // report error and update error stats
       this.handleException(e);
     } finally {
diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorManager.java b/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorManager.java
index bab9f8f..b75ce98 100644
--- a/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorManager.java
+++ b/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorManager.java
@@ -20,11 +20,9 @@ import java.io.Closeable;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import java.util.Optional;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;
@@ -186,14 +184,14 @@ class CdcrReplicatorManager implements CdcrStateManager.CdcrStateObserver {
           try {
             bootstrapExecutor.submit(bootstrapStatusRunnable);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             log.error("Unable to submit bootstrap call to executor", e);
           }
         }
       } catch (IOException | SolrServerException | SolrException e) {
         log.warn("Unable to instantiate the log reader for target collection {}", state.getTargetCollection(), e);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       }
     }
   }
@@ -268,7 +266,7 @@ class CdcrReplicatorManager implements CdcrStateManager.CdcrStateObserver {
               targetCollection, shard, leaderCoreUrl);
         }
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       }
     }
 
@@ -294,7 +292,7 @@ class CdcrReplicatorManager implements CdcrStateManager.CdcrStateObserver {
                   BOOTSTRAP_TIMEOUT_SECONDS - timeOut.timeLeft(TimeUnit.SECONDS), BOOTSTRAP_RETRY_DELAY_MS);
               timeOut.sleep(BOOTSTRAP_RETRY_DELAY_MS);
             } catch (InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
             }
           } else if (status == BootstrapStatus.COMPLETED) {
             log.info("CDCR bootstrap successful in {} seconds", BOOTSTRAP_TIMEOUT_SECONDS - timeOut.timeLeft(TimeUnit.SECONDS));
@@ -345,7 +343,7 @@ class CdcrReplicatorManager implements CdcrStateManager.CdcrStateObserver {
           }
         }
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         state.reportError(CdcrReplicatorState.ErrorType.INTERNAL);
         Thread.currentThread().interrupt();
       } catch (IOException | SolrServerException | SolrException e) {
@@ -372,7 +370,7 @@ class CdcrReplicatorManager implements CdcrStateManager.CdcrStateObserver {
           String status = response.get(RESPONSE_STATUS).toString();
           return BootstrapStatus.valueOf(status.toUpperCase(Locale.ROOT));
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("Exception submitting bootstrap request", e);
           return BootstrapStatus.UNKNOWN;
         }
@@ -409,7 +407,7 @@ class CdcrReplicatorManager implements CdcrStateManager.CdcrStateObserver {
           }
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Exception during bootstrap status request", e);
         return BootstrapStatus.UNKNOWN;
       }
diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorScheduler.java b/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorScheduler.java
index d349be4..3fd68aa 100644
--- a/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorScheduler.java
+++ b/solr/core/src/java/org/apache/solr/handler/CdcrReplicatorScheduler.java
@@ -16,6 +16,13 @@
  */
 package org.apache.solr.handler;
 
+import java.lang.invoke.MethodHandles;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.ExecutorUtil;
@@ -23,9 +30,6 @@ import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.lang.invoke.MethodHandles;
-import java.util.concurrent.*;
-
 /**
  * Schedule the execution of the {@link org.apache.solr.handler.CdcrReplicator} threads at
  * regular time interval. It relies on a queue of {@link org.apache.solr.handler.CdcrReplicatorState} in
@@ -104,7 +108,7 @@ class CdcrReplicatorScheduler {
       try {
         replicatorsPool.awaitTermination(60, TimeUnit.SECONDS);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       } finally {
         scheduler.shutdownNow();
         isStarted = false;
diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java
index 9772a19..86ff2ea 100644
--- a/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java
@@ -59,6 +59,7 @@ import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.solr.core.CloseHook;
 import org.apache.solr.core.PluginBag;
 import org.apache.solr.core.SolrCore;
@@ -71,7 +72,6 @@ import org.apache.solr.update.SolrCoreState;
 import org.apache.solr.update.UpdateLog;
 import org.apache.solr.update.VersionInfo;
 import org.apache.solr.update.processor.DistributedUpdateProcessor;
-import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.solr.util.plugin.SolrCoreAware;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -423,7 +423,7 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw
         }
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
           "Error while requesting shard's checkpoints", e);
     } catch (ExecutionException e) {
@@ -531,7 +531,7 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw
         logReader.next();
         lastProcessedVersion = Math.min(lastProcessedVersion, logReader.getLastVersion());
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
             "Error while fetching the last processed version", e);
       } catch (IOException e) {
@@ -654,7 +654,7 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw
           try {
             bootstrapFuture.get();
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
           } catch (ExecutionException e) {
             log.error("Bootstrap operation failed", e);
           }
@@ -711,7 +711,7 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw
           rsp.add(RESPONSE_STATUS, FAILED);
         }
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       } catch (ExecutionException e) {
         rsp.add(RESPONSE_STATUS, FAILED);
         rsp.add(RESPONSE, e);
diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrUpdateLogSynchronizer.java b/solr/core/src/java/org/apache/solr/handler/CdcrUpdateLogSynchronizer.java
index c610c5f..bfc7bd8 100644
--- a/solr/core/src/java/org/apache/solr/handler/CdcrUpdateLogSynchronizer.java
+++ b/solr/core/src/java/org/apache/solr/handler/CdcrUpdateLogSynchronizer.java
@@ -36,9 +36,9 @@ import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.update.CdcrUpdateLog;
-import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -178,7 +178,7 @@ class CdcrUpdateLogSynchronizer implements CdcrStateManager.CdcrStateObserver {
             ulog.getBufferToggle().seek(lastVersion);
           }
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         } catch (IOException e) {
           log.warn("Couldn't advance replica buffering tlog reader to {} (to remove old tlogs): {}", lastVersion, e.getMessage());
         }
diff --git a/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java
index ea70b86..9833213 100644
--- a/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/DocumentAnalysisRequestHandler.java
@@ -16,12 +16,21 @@
  */
 package org.apache.solr.handler;
 
+import javax.xml.stream.XMLStreamConstants;
+import javax.xml.stream.XMLStreamException;
+import javax.xml.stream.XMLStreamReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.invoke.MethodHandles;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Set;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.util.BytesRef;
 import org.apache.solr.client.solrj.impl.XMLResponseParser;
 import org.apache.solr.client.solrj.request.DocumentAnalysisRequest;
-import org.apache.solr.common.EmptyEntityResolver;
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
@@ -41,16 +50,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.common.params.CommonParams.NAME;
-import javax.xml.stream.XMLInputFactory;
-import javax.xml.stream.XMLStreamConstants;
-import javax.xml.stream.XMLStreamException;
-import javax.xml.stream.XMLStreamReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.lang.invoke.MethodHandles;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.Set;
 
 /**
  * An analysis handler that provides a breakdown of the analysis process of provided documents. This handler expects a
@@ -194,7 +193,7 @@ public class DocumentAnalysisRequestHandler extends AnalysisRequestHandlerBase {
             ? getQueryTokenSet(queryValue, fieldType.getQueryAnalyzer())
             : EMPTY_BYTES_SET;
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           // ignore analysis exceptions since we are applying arbitrary text to all fields
           termsToMatch = EMPTY_BYTES_SET;
         }
@@ -204,7 +203,7 @@ public class DocumentAnalysisRequestHandler extends AnalysisRequestHandlerBase {
             AnalysisContext analysisContext = new AnalysisContext(fieldType, fieldType.getQueryAnalyzer(), EMPTY_BYTES_SET);
             fieldTokens.add("query", analyzeValue(request.getQuery(), analysisContext));
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             // ignore analysis exceptions since we are applying arbitrary text to all fields
           }
         }
diff --git a/solr/core/src/java/org/apache/solr/handler/ExportHandler.java b/solr/core/src/java/org/apache/solr/handler/ExportHandler.java
index 14aae8f..e1c4bb0 100644
--- a/solr/core/src/java/org/apache/solr/handler/ExportHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ExportHandler.java
@@ -105,7 +105,7 @@ public class ExportHandler extends SearchHandler {
     try {
       super.handleRequestBody(req, rsp);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       rsp.setException(e);
     }
     String wt = req.getParams().get(CommonParams.WT, JSON);
diff --git a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
index 61bef32..eda59e9 100644
--- a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
@@ -146,7 +146,7 @@ public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, P
     try {
       tupleStream = this.streamFactory.constructStream(params.get("expr"));
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       //Catch exceptions that occur while the stream is being created. This will include streaming expression parse rules.
       SolrException.log(log, e);
       @SuppressWarnings({"rawtypes"})
diff --git a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
index 5eee200..8674e28 100644
--- a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
+++ b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
@@ -58,7 +58,6 @@ import java.util.zip.Checksum;
 import java.util.zip.InflaterInputStream;
 
 import com.google.common.base.Strings;
-import org.apache.http.client.HttpClient;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.index.IndexCommit;
 import org.apache.lucene.index.IndexWriter;
@@ -73,8 +72,6 @@ import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.Http2SolrClient;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
-import org.apache.solr.client.solrj.impl.HttpSolrClient.Builder;
 import org.apache.solr.client.solrj.impl.InputStreamResponseParser;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.cloud.CloudDescriptor;
@@ -86,7 +83,6 @@ import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.FastInputStream;
 import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.common.util.NamedList;
@@ -102,7 +98,6 @@ import org.apache.solr.update.CdcrUpdateLog;
 import org.apache.solr.update.CommitUpdateCommand;
 import org.apache.solr.update.UpdateLog;
 import org.apache.solr.update.VersionInfo;
-import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.solr.util.FileUtils;
 import org.apache.solr.util.PropertiesOutputStream;
 import org.apache.solr.util.RTimer;
@@ -421,7 +416,7 @@ public class IndexFetcher {
       try {
         response = getLatestVersion();
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         final String errorMsg = e.toString();
         if (!Strings.isNullOrEmpty(errorMsg) && errorMsg.contains(INTERRUPT_RESPONSE_MESSAGE)) {
             log.warn("Master at: {} is not available. Index fetch failed by interrupt. Exception: {}", masterUrl, errorMsg);
@@ -702,10 +697,10 @@ public class IndexFetcher {
       } catch (SolrException e) {
         throw e;
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new InterruptedException("Index fetch interrupted");
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(ErrorCode.SERVER_ERROR, "Index fetch failed : ", e);
       }
     } finally {
@@ -731,7 +726,7 @@ public class IndexFetcher {
           try {
             logReplicationTimeAndConfFiles(null, successfulInstall);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             // this can happen on shutdown, a fetch may be running in a thread after DirectoryFactory is closed
             log.warn("Could not log failed replication details", e);
           }
@@ -759,13 +754,13 @@ public class IndexFetcher {
           core.getDirectoryFactory().remove(tmpIndexDir);
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         SolrException.log(log, e);
       } finally {
         try {
           if (tmpIndexDir != null) core.getDirectoryFactory().release(tmpIndexDir);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           SolrException.log(log, e);
         }
         try {
@@ -773,13 +768,13 @@ public class IndexFetcher {
             core.getDirectoryFactory().release(indexDir);
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           SolrException.log(log, e);
         }
         try {
           if (tmpTlogDir != null) delTree(tmpTlogDir);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           SolrException.log(log, e);
         }
       }
@@ -881,7 +876,7 @@ public class IndexFetcher {
       
       solrCore.getDirectoryFactory().renameWithOverwrite(dir, tmpFileName, REPLICATION_PROPERTIES);
     } catch (Throwable e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Exception while updating statistics", e);
       if (e instanceof  Error) {
         throw e;
@@ -947,7 +942,7 @@ public class IndexFetcher {
         try {
           waitSearcher[0].get();
         } catch (InterruptedException | ExecutionException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
       }
       commitPoint = searcher.get().getIndexReader().getIndexCommit();
diff --git a/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java b/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java
index fd991ed..b657c5f 100644
--- a/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/MoreLikeThisHandler.java
@@ -44,8 +44,8 @@ import org.apache.solr.common.SolrException;
 import org.apache.solr.common.StringUtils;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.FacetParams;
-import org.apache.solr.common.params.MoreLikeThisParams.TermStyle;
 import org.apache.solr.common.params.MoreLikeThisParams;
+import org.apache.solr.common.params.MoreLikeThisParams.TermStyle;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.ContentStream;
 import org.apache.solr.common.util.NamedList;
@@ -276,7 +276,7 @@ public class MoreLikeThisHandler extends RequestHandlerBase
               rsp.add("debug", dbgInfo);
             }
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             SolrException.log(log, "Exception during debug", e);
             rsp.add("exception_during_debug", SolrException.toStr(e));
           }
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index 5aa65f7..f33181b 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -81,6 +81,7 @@ import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.FastOutputStream;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
+import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.SuppressForbidden;
 import org.apache.solr.core.CloseHook;
@@ -101,7 +102,6 @@ import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.update.CdcrUpdateLog;
 import org.apache.solr.update.SolrIndexWriter;
 import org.apache.solr.update.VersionInfo;
-import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.solr.util.NumberUtils;
 import org.apache.solr.util.PropertiesInputStream;
 import org.apache.solr.util.RefCounted;
@@ -394,7 +394,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
         checksum.update(buffer, 0, bytesRead);
       return checksum.getValue();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Exception in finding checksum of {}", f, e);
     } finally {
       ParWork.close(fis);
@@ -430,7 +430,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
         throw (AlreadyClosedException) e;
       }
 
-      ParWork.propegateInterrupt("Index fetch failed", e);
+      ParWork.propagateInterrupt("Index fetch failed", e);
       if (currentIndexFetcher != pollingIndexFetcher) {
         currentIndexFetcher.destroy();
       }
@@ -534,7 +534,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
           restoreStatus.add(STATUS, FAILED);
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         restoreStatus.add(STATUS, FAILED);
         restoreStatus.add(EXCEPTION, e.getMessage());
         rsp.add(CMD_RESTORE_STATUS, restoreStatus);
@@ -595,7 +595,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
     } catch (SolrException e) {
       throw e;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error("Exception while creating a snapshot", e);
       reportErrorOnResponse(rsp, "Error encountered while creating a snapshot: " + e.getMessage(), e);
     }
@@ -667,7 +667,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
                 long checksum = CodecUtil.retrieveChecksum(in);
                 fileMeta.put(CHECKSUM, checksum);
               } catch (Exception e) {
-                ParWork.propegateInterrupt(e);
+                ParWork.propagateInterrupt(e);
                 //TODO Should this trigger a larger error?
                 log.warn("Could not read checksum from index file: {}", file, e);
               }
@@ -687,7 +687,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
             try {
               fileMeta.put(CHECKSUM, CodecUtil.retrieveChecksum(in));
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               //TODO Should this trigger a larger error?
               log.warn("Could not read checksum from index file: {}", infos.getSegmentsFileName(), e);
             }
@@ -987,7 +987,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
           NamedList nl = fetcher.getDetails();
           slave.add("masterDetails", nl.get(CMD_DETAILS));
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn(
               "Exception while invoking 'details' method for replication on master ",
               e);
@@ -1097,7 +1097,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
           slave.add("timeRemaining", String.valueOf(estimatedTimeRemaining) + "s");
           slave.add("downloadSpeed", NumberUtils.readableSize(downloadSpeed));
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("Exception while writing replication details: ", e);
         }
       }
@@ -1238,7 +1238,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
         IndexFetchResult fetchResult = doFetch(null, false);
         if (pollListener != null) pollListener.onComplete(core, fetchResult);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Exception in fetching index", e);
       }
     };
@@ -1489,7 +1489,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
             snapShooter.validateCreateSnapshot();
             snapShooter.createSnapAsync(numberToKeep, (nl) -> snapShootDetails = nl);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             log.error("Exception while snapshooting", e);
           }
         }
diff --git a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
index 67ea173..5785296 100644
--- a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
+++ b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
@@ -19,8 +19,6 @@ package org.apache.solr.handler;
 import java.io.Closeable;
 import java.lang.invoke.MethodHandles;
 import java.util.Collection;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
 
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Meter;
@@ -40,7 +38,6 @@ import org.apache.solr.common.util.SuppressForbidden;
 import org.apache.solr.core.PluginBag;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrInfoBean;
-import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestHandler;
@@ -224,7 +221,7 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
         }
       }
     } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new AlreadyClosedException(e);
     } catch (Exception e) {
       log.error("Error get", e);
diff --git a/solr/core/src/java/org/apache/solr/handler/RestoreCore.java b/solr/core/src/java/org/apache/solr/handler/RestoreCore.java
index 20ef0ea..c3eaace 100644
--- a/solr/core/src/java/org/apache/solr/handler/RestoreCore.java
+++ b/solr/core/src/java/org/apache/solr/handler/RestoreCore.java
@@ -86,7 +86,7 @@ public class RestoreCore implements Callable<Boolean> {
           try {
             checksum = CodecUtil.retrieveChecksum(indexInput);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             log.warn("Could not read checksum from index file: {}", filename, e);
           }
           long length = indexInput.length();
@@ -99,7 +99,7 @@ public class RestoreCore implements Callable<Boolean> {
             restoreIndexDir.copyFrom(indexDir, filename, filename, IOContext.READONCE);
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Exception while restoring the backup index ", e);
           throw new SolrException(SolrException.ErrorCode.UNKNOWN, "Exception while restoring the backup index", e);
         }
@@ -113,7 +113,7 @@ public class RestoreCore implements Callable<Boolean> {
         success = true;
         log.info("Successfully restored to the backup index");
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         //Rollback to the old index directory. Delete the restore index directory and mark the restore as failed.
         log.warn("Could not switch to restored index. Rolling back to the current index", e);
         Directory dir = null;
diff --git a/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java b/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java
index 6709d78..5e76848 100644
--- a/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java
@@ -215,7 +215,7 @@ public class SchemaHandler extends RequestHandlerBase implements SolrCoreAware,
       }
 
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       rsp.setException(e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/handler/SnapShooter.java b/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
index d2cabd3..d1ee999 100644
--- a/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
+++ b/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
@@ -218,7 +218,7 @@ public class SnapShooter {
       try {
         snapShootDetails = createSnapshot();
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Exception while creating snapshot", e);
         snapShootDetails = new NamedList<>();
         snapShootDetails.add("exception", e.getMessage());
@@ -283,7 +283,7 @@ public class SnapShooter {
         try {
           backupRepo.deleteDirectory(snapshotDirPath);
         } catch (Exception excDuringDelete) {
-          ParWork.propegateInterrupt(excDuringDelete);
+          ParWork.propagateInterrupt(excDuringDelete);
           log.warn("Failed to delete {} after snapshot creation failed due to: {}", snapshotDirPath, excDuringDelete);
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
index be07834..5ee0177 100644
--- a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
@@ -16,6 +16,24 @@
  */
 package org.apache.solr.handler;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import org.apache.solr.api.Api;
@@ -79,23 +97,6 @@ import static org.apache.solr.core.SolrConfig.PluginOpts.REQUIRE_CLASS;
 import static org.apache.solr.core.SolrConfig.PluginOpts.REQUIRE_NAME;
 import static org.apache.solr.core.SolrConfig.PluginOpts.REQUIRE_NAME_IN_OVERLAY;
 import static org.apache.solr.schema.FieldType.CLASS_NAME;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
 
 public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -239,7 +240,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
                         (ZkSolrResourceLoader) req.getCore()
                             .getResourceLoader()).run();
                   } catch (Exception e) {
-                    ParWork.propegateInterrupt(e);
+                    ParWork.propagateInterrupt(e);
                     if (e instanceof InterruptedException || e instanceof AlreadyClosedException) {
                       return;
                     }
@@ -397,7 +398,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
           }
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         if (e instanceof  InterruptedException || e instanceof  AlreadyClosedException) {
           return;
         }
@@ -436,7 +437,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
               try {
                 val = (Map) entry.getValue();
               } catch (Exception e1) {
-                ParWork.propegateInterrupt(e1);
+                ParWork.propagateInterrupt(e1);
                 op.addError("invalid params for key : " + key);
                 continue;
               }
@@ -645,7 +646,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
             req.getCore().createInitInstance(info, expected, clz, "");
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.error("Error checking plugin : ", e);
           op.addError(e.getMessage());
           return false;
@@ -714,7 +715,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
             try {
               val = Boolean.parseBoolean(val.toString());
             } catch (Exception exp) {
-              ParWork.propegateInterrupt(exp);
+              ParWork.propagateInterrupt(exp);
               op.addError(formatString(typeErr, name, typ.getSimpleName()));
               continue;
             }
@@ -722,7 +723,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
             try {
               val = Integer.parseInt(val.toString());
             } catch (Exception exp) {
-              ParWork.propegateInterrupt(exp);
+              ParWork.propagateInterrupt(exp);
               op.addError(formatString(typeErr, name, typ.getSimpleName()));
               continue;
             }
@@ -866,7 +867,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
                 failedList.size(), concurrentTasks.size() + 1, prop, expectedVersion, maxWaitSecs, failedList));
 
     } catch (InterruptedException ie) {
-      ParWork.propegateInterrupt(ie);
+      ParWork.propagateInterrupt(ie);
       return;
     }
 
@@ -965,7 +966,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
             }
           } catch (Exception e) {
             if (e instanceof InterruptedException || e instanceof AlreadyClosedException) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               break; // stop looping
             } else {
               log.warn("Failed to get /schema/zkversion from {} due to: ", coreUrl, e);
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index 08028bc..2411c42 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -21,7 +21,6 @@ import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
@@ -188,7 +187,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
         tupleStream = this.streamFactory.constructStream(streamExpression);
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       // Catch exceptions that occur while the stream is being created. This will include streaming expression parse
       // rules.
       SolrException.log(log, e);
diff --git a/solr/core/src/java/org/apache/solr/handler/UpdateRequestHandlerApi.java b/solr/core/src/java/org/apache/solr/handler/UpdateRequestHandlerApi.java
index 76c9a1f..bf7dc3b 100644
--- a/solr/core/src/java/org/apache/solr/handler/UpdateRequestHandlerApi.java
+++ b/solr/core/src/java/org/apache/solr/handler/UpdateRequestHandlerApi.java
@@ -22,9 +22,9 @@ import java.util.Collections;
 import java.util.Map;
 
 import com.google.common.collect.ImmutableMap;
+import org.apache.solr.api.Api;
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.api.Api;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
@@ -50,7 +50,7 @@ public class UpdateRequestHandlerApi extends UpdateRequestHandler  {
         } catch (RuntimeException e) {
           throw e;
         } catch (Exception e){
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,e );
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/AutoscalingHistoryHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/AutoscalingHistoryHandler.java
index bf68f5d..e6c7c6c 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/AutoscalingHistoryHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/AutoscalingHistoryHandler.java
@@ -132,7 +132,7 @@ public class AutoscalingHistoryHandler extends RequestHandlerBase implements Per
       QueryResponse qr = cloudSolrClient.query(collection, params);
       rsp.setAllValues(qr.getResponse());
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       if ((e instanceof SolrException) && e.getMessage().contains("Collection not found")) {
         // relatively benign
         String msg = "Collection " + collection + " does not exist.";
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/BackupCoreOp.java b/solr/core/src/java/org/apache/solr/handler/admin/BackupCoreOp.java
index 8b98540..52edb83 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/BackupCoreOp.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/BackupCoreOp.java
@@ -68,7 +68,7 @@ class BackupCoreOp implements CoreAdminHandler.CoreAdminOp {
       snapShooter.validateCreateSnapshot();
       snapShooter.createSnapshot();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
           "Failed to backup core=" + cname + " because " + e, e);
     }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/BaseHandlerApiSupport.java b/solr/core/src/java/org/apache/solr/handler/admin/BaseHandlerApiSupport.java
index 282e791..3139450 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/BaseHandlerApiSupport.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/BaseHandlerApiSupport.java
@@ -122,7 +122,7 @@ public abstract class BaseHandlerApiSupport implements ApiSupport {
         } catch (SolrException e) {
           throw e;
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(BAD_REQUEST, e); //TODO BAD_REQUEST is a wild guess; should we flip the default?  fail here to investigate how this happens in tests
         } finally {
           req.setParams(params);
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java
index c6f2b50..8e334a2 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java
@@ -75,7 +75,7 @@ public class CollectionHandlerApi extends BaseHandlerApiSupport {
       try {
         clusterProperties.setClusterProperties(commands.get(0).getDataMap());
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error in API", e);
       }
     });
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index 15239cb..c58dd0e 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -16,6 +16,26 @@
  */
 package org.apache.solr.handler.admin;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.stream.Collectors;
+
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
 import org.apache.commons.io.IOUtils;
@@ -124,48 +144,7 @@ import static org.apache.solr.common.params.CollectionAdminParams.FOLLOW_ALIASES
 import static org.apache.solr.common.params.CollectionAdminParams.PROPERTY_NAME;
 import static org.apache.solr.common.params.CollectionAdminParams.PROPERTY_VALUE;
 import static org.apache.solr.common.params.CollectionAdminParams.WITH_COLLECTION;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICAPROP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDROLE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.ALIASPROP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.BACKUP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.BALANCESHARDUNIQUE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CLUSTERPROP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CLUSTERSTATUS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.COLLECTIONPROP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.COLSTATUS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATEALIAS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATESHARD;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATESNAPSHOT;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEALIAS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETENODE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICA;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICAPROP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETESHARD;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETESNAPSHOT;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETESTATUS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.FORCELEADER;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.LIST;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.LISTALIASES;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.LISTSNAPSHOTS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MIGRATE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MIGRATESTATEFORMAT;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MODIFYCOLLECTION;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.OVERSEERSTATUS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.REBALANCELEADERS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.REINDEXCOLLECTION;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.RELOAD;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.REMOVEROLE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.RENAME;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.REPLACENODE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.REQUESTSTATUS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.RESTORE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.SPLITSHARD;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.SYNCSHARD;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.UTILIZENODE;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.*;
 import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
 import static org.apache.solr.common.params.CommonAdminParams.IN_PLACE_MOVE;
 import static org.apache.solr.common.params.CommonAdminParams.NUM_SUB_SHARDS;
@@ -185,25 +164,6 @@ import static org.apache.solr.common.params.CoreAdminParams.INSTANCE_DIR;
 import static org.apache.solr.common.params.CoreAdminParams.ULOG_DIR;
 import static org.apache.solr.common.params.ShardParams._ROUTE_;
 import static org.apache.solr.common.util.StrUtils.formatString;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.net.URI;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.stream.Collectors;
 
 public class CollectionsHandler extends RequestHandlerBase implements PermissionNameProvider {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -385,7 +345,7 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
             try {
               coreContainer.getZkController().clearAsyncId(asyncId);
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               // let the original exception bubble up
               log.error("Unable to release async ID={}", asyncId, e);
               SolrZkClient.checkInterrupted(e);
@@ -1410,14 +1370,14 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
       try {
         zkController.getZkStateReader().getLeaderRetry(collectionName, sliceId, 30);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.info("Couldn't successfully force leader, collection: {}, shard: {}. Cluster state: {}", collectionName, sliceId, clusterState);
       }
 
     } catch (SolrException e) {
       throw e;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR,
           "Error executing FORCELEADER operation for collection: " + collectionName + " shard: " + sliceId, e);
     }
@@ -1460,7 +1420,7 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
       throw new RuntimeException("Failed while waiting for active collection" + "\n" + e.getMessage() + " \nShards:" + shards + " Replicas:" + totalReplicas + "\nLive Nodes: " + Arrays.toString(liveNodesLastSeen.get().toArray())
               + "\nLast available state: " + state.get());
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, e);
     }
 
@@ -1476,7 +1436,7 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
         try {
           new Rule(map);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error in rule " + m, e);
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
index 309fc71..b2042bc 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
@@ -183,7 +183,7 @@ public class CoreAdminHandler extends RequestHandlerBase implements PermissionNa
                 taskObject.setRspObject(callInfo.rsp);
               }
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               exceptionCaught = true;
               taskObject.setRspObjectFromException(e);
             } finally {
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
index 664e117..aacbe2f 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
@@ -370,7 +370,7 @@ enum CoreAdminOperation implements CoreAdminOp {
     try {
       fun.execute(it);
     } catch (SolrException | InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       // No need to re-wrap; throw as-is.
       throw e;
     } catch (Exception e) {
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
index ccc4999..5ad02ab 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
@@ -392,7 +392,7 @@ public class LukeRequestHandler extends RequestHandlerBase
                 fieldMap.add("index", "(unstored field)");
               }
             } catch (Exception ex) {
-              ParWork.propegateInterrupt(ex);
+              ParWork.propagateInterrupt(ex);
               log.warn("error reading field: {}", fieldName);
             }
           }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MergeIndexesOp.java b/solr/core/src/java/org/apache/solr/handler/admin/MergeIndexesOp.java
index 1dde980..c475a76 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MergeIndexesOp.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MergeIndexesOp.java
@@ -115,7 +115,7 @@ class MergeIndexesOp implements CoreAdminHandler.CoreAdminOp {
           processorChain.createProcessor(wrappedReq, it.rsp);
       processor.processMergeIndexes(new MergeIndexesCommand(readers, it.req));
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       // log and rethrow so that if the finally fails we don't lose the original problem
       log.error("ERROR executing merge:", e);
       throw e;
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java
index 2fa09b4..0655305 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHistoryHandler.java
@@ -75,6 +75,7 @@ import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.ObjectReleaseTracker;
 import org.apache.solr.common.util.Pair;
 import org.apache.solr.common.util.SimpleOrderedMap;
+import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.solr.common.util.TimeSource;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.metrics.SolrMetricManager;
@@ -83,7 +84,6 @@ import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.security.AuthorizationContext;
 import org.apache.solr.security.PermissionNameProvider;
-import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.rrd4j.ConsolFun;
 import org.rrd4j.DsType;
 import org.rrd4j.core.ArcDef;
@@ -267,7 +267,7 @@ public class MetricsHistoryHandler extends RequestHandlerBase implements Permiss
           }
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         if (logMissingCollection) {
           log.warn("Error getting cluster state, keeping metrics history in memory", e);
         }
@@ -283,7 +283,7 @@ public class MetricsHistoryHandler extends RequestHandlerBase implements Permiss
         factory.setPersistent(true);
         logMissingCollection = true;
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         if (logMissingCollection) {
           log.info("No {} collection, keeping metrics history in memory.", CollectionAdminParams.SYSTEM_COLL);
         }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/RequestApplyUpdatesOp.java b/solr/core/src/java/org/apache/solr/handler/admin/RequestApplyUpdatesOp.java
index fd2b685..5c1a01a7c 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/RequestApplyUpdatesOp.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/RequestApplyUpdatesOp.java
@@ -58,7 +58,7 @@ class RequestApplyUpdatesOp implements CoreAdminHandler.CoreAdminOp {
       it.rsp.add("core", cname);
       it.rsp.add("status", "BUFFER_APPLIED");
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       CoreAdminOperation.log().warn("Recovery was interrupted", e);
     } catch (Exception e) {
       if (e instanceof SolrException)
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperInfoHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperInfoHandler.java
index 65328d0..8cdabce 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperInfoHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/ZookeeperInfoHandler.java
@@ -562,7 +562,7 @@ public final class ZookeeperInfoHandler extends RequestHandlerBase {
         writeKeyValue(json, "warning", e.toString(), false);
         log.warn("Keeper Exception", e);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       }
 
       if (stat.getNumChildren() > 0) {
@@ -594,7 +594,7 @@ public final class ZookeeperInfoHandler extends RequestHandlerBase {
           writeError(500, e.toString());
           return false;
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           return false;
         } catch (IllegalArgumentException e) {
           // path doesn't exist (must have been removed)
@@ -768,7 +768,7 @@ public final class ZookeeperInfoHandler extends RequestHandlerBase {
         writeError(500, e.toString());
         return false;
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         return false;
       }
       return true;
diff --git a/solr/core/src/java/org/apache/solr/handler/component/CloudReplicaSource.java b/solr/core/src/java/org/apache/solr/handler/component/CloudReplicaSource.java
index 846abe6..f9d759d 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/CloudReplicaSource.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/CloudReplicaSource.java
@@ -189,7 +189,7 @@ class CloudReplicaSource implements ReplicaSource {
         try {
           shardLeader = zkStateReader.getLeaderRetry(collectionName, sliceName);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE,
               "Exception finding leader for shard " + sliceName + " in collection "
                   + collectionName, e);
diff --git a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
index 69df53c..a62a8df 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
@@ -275,7 +275,7 @@ public class HttpShardHandler extends ShardHandler {
           return rsp;
         }
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new AlreadyClosedException(e);
       } catch (ExecutionException e) {
         // should be impossible... the problem with catching the exception
@@ -457,7 +457,7 @@ public class HttpShardHandler extends ShardHandler {
                     return true;
                   });
                 } catch (InterruptedException e) {
-                  ParWork.propegateInterrupt(e);
+                  ParWork.propagateInterrupt(e);
                   throw new AlreadyClosedException(e);
                 } catch (TimeoutException e) {
                   throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, "no servers hosting shard: " + rb.slices[i]);
@@ -557,7 +557,7 @@ public class HttpShardHandler extends ShardHandler {
         try {
           shardLeader = zkController.getZkStateReader().getLeaderRetry(cloudDescriptor.getCollectionName(), slice.getName());
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, "Exception finding leader for shard " + slice.getName() + " in collection "
                   + cloudDescriptor.getCollectionName(), e);
         } catch (SolrException e) {
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
index bd7ca10..c20896b 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
@@ -16,6 +16,28 @@
  */
 package org.apache.solr.metrics;
 
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+import java.util.stream.Collectors;
+
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Gauge;
 import com.codahale.metrics.Histogram;
@@ -39,28 +61,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.MDC;
 
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
-import java.util.regex.Pattern;
-import java.util.regex.PatternSyntaxException;
-import java.util.stream.Collectors;
-
 /**
  * This class maintains a repository of named {@link MetricRegistry} instances, and provides several
  * helper methods for managing various aspects of metrics reporting:
@@ -1087,7 +1087,7 @@ public class SolrMetricManager {
         throw new Exception("Could not obtain lock to modify reporters registry: " + registry);
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new Exception("Interrupted while trying to obtain lock to modify reporters registry: " + registry);
     }
     try {
@@ -1129,7 +1129,7 @@ public class SolrMetricManager {
         return false;
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Interrupted while trying to obtain lock to modify reporters registry: {}", registry);
       return false;
     }
@@ -1227,7 +1227,7 @@ public class SolrMetricManager {
         return Collections.emptyMap();
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Interrupted while trying to obtain lock to modify reporters registry: {}", registry);
       return Collections.emptyMap();
     }
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
index 759b80a..79b5fab 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
@@ -262,7 +262,7 @@ public class SolrClusterReporter extends SolrCoreContainerReporter {
         log.warn("Could not obtain overseer's address, skipping.", e);
         return lastKnownUrl;
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         return lastKnownUrl;
       }
       if (props == null) {
diff --git a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
index 8a84b78..bb8d5cd 100644
--- a/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
+++ b/solr/core/src/java/org/apache/solr/pkg/PackageAPI.java
@@ -121,7 +121,7 @@ public class PackageAPI {
             } catch (KeeperException e) {
               log.error("A ZK error has occurred", e);
             } catch (InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
             }
           }
 
@@ -242,7 +242,7 @@ public class PackageAPI {
           Http2SolrClient.GET(coreContainer.getZkController().
               zkStateReader.getBaseUrlForNodeName(s).replace("/solr", "/api") + "/cluster/package?wt=javabin&omitHeader=true&refreshPackage=" + p, coreContainer.getUpdateShardHandler().getTheSharedHttpClient());
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         } catch (ExecutionException e) {
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
         } catch (TimeoutException e) {
@@ -398,7 +398,7 @@ public class PackageAPI {
         try {
           Thread.sleep(10);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
         try {
           pkgs = readPkgsFromZk(null, null);
@@ -421,7 +421,7 @@ public class PackageAPI {
             getBaseUrlForNodeName(s).replace("/solr", "/api") +
             "/cluster/package?wt=javabin&omitHeader=true&expectedVersion" + expected, coreContainer.getUpdateShardHandler().getTheSharedHttpClient());
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       } catch (ExecutionException e) {
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
       } catch (TimeoutException e) {
diff --git a/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java b/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java
index cbf82af..bbd28aa 100644
--- a/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java
+++ b/solr/core/src/java/org/apache/solr/request/PerSegmentSingleValuedFaceting.java
@@ -146,7 +146,7 @@ class PerSegmentSingleValuedFaceting {
           completionService.submit(pending.removeFirst());
         }
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
       } catch (ExecutionException e) {
         Throwable cause = e.getCause();
diff --git a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
index 9a65453..9b39019 100644
--- a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
+++ b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
@@ -16,6 +16,24 @@
  */
 package org.apache.solr.request;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.AbstractMap.SimpleImmutableEntry;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.IdentityHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.Executor;
+import java.util.concurrent.Future;
+import java.util.function.Predicate;
+import java.util.stream.Stream;
+
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.lucene.index.ExitableDirectoryReader;
 import org.apache.lucene.index.LeafReader;
@@ -75,23 +93,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.common.params.CommonParams.SORT;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.AbstractMap.SimpleImmutableEntry;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.IdentityHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.Callable;
-import java.util.concurrent.Executor;
-import java.util.concurrent.Future;
-import java.util.function.Predicate;
-import java.util.stream.Stream;
 
 /**
  * A class that generates simple Facet information for a request.
@@ -839,7 +840,7 @@ public class SimpleFacets {
             throw timeout;
           }
           catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new SolrException(ErrorCode.SERVER_ERROR,
                                     "Exception during facet.field: " + facetValue, e);
           }
diff --git a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java b/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
index fffa5a2..b05689a 100644
--- a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
+++ b/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
@@ -16,6 +16,13 @@
  */
 package org.apache.solr.rest.schema;
 
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import java.lang.invoke.MethodHandles;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
@@ -29,13 +36,6 @@ import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 import org.w3c.dom.Node;
 
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import java.lang.invoke.MethodHandles;
-import java.util.List;
-import java.util.Map;
-
 /**
  * Utility class for converting a JSON definition of a FieldType into the
  * XML format expected by the FieldTypePluginLoader.
@@ -79,7 +79,7 @@ public class FieldTypeXmlAdapter {
     try {
       factory.setFeature(feature, enabled);
     } catch (Exception ex) {
-      ParWork.propegateInterrupt(ex);
+      ParWork.propagateInterrupt(ex);
       // ignore
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
index c9dc786..998c0ef 100644
--- a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
+++ b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
@@ -51,7 +51,6 @@ import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrResponse;
 import org.apache.solr.client.solrj.impl.Http2SolrClient;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.cloud.ZkController;
 import org.apache.solr.cloud.ZkSolrResourceLoader;
 import org.apache.solr.common.ParWork;
@@ -275,7 +274,7 @@ public final class ManagedIndexSchema extends IndexSchema {
     } catch (InterruptedException ie) {
       log.warn("Core {} was interrupted waiting for schema version {} to propagate to {} replicas for collection {}"
           , localCoreNodeName, schemaZkVersion, concurrentTasks.size(), collection);
-      ParWork.propegateInterrupt(ie);
+      ParWork.propagateInterrupt(ie);
     }
 
     if (log.isInfoEnabled()) {
diff --git a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchemaFactory.java b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchemaFactory.java
index a9387e3..2ef7b60 100644
--- a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchemaFactory.java
+++ b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchemaFactory.java
@@ -16,6 +16,12 @@
  */
 package org.apache.solr.schema;
 
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.invoke.MethodHandles;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.lucene.analysis.util.ResourceLoader;
 import org.apache.solr.cloud.ZkController;
@@ -40,12 +46,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xml.sax.InputSource;
 
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.lang.invoke.MethodHandles;
-
 /** Factory for ManagedIndexSchema */
 public class ManagedIndexSchemaFactory extends IndexSchemaFactory implements SolrCoreAware {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -142,7 +142,7 @@ public class ManagedIndexSchemaFactory extends IndexSchemaFactory implements Sol
         loadedResource = managedSchemaResourceName;
         warnIfNonManagedSchemaExists();
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(ErrorCode.SERVER_ERROR, e);
       } catch (KeeperException.NoNodeException e) {
         log.info("The schema is configured as managed, but managed schema resource {} not found - loading non-managed schema {} instead"
@@ -233,7 +233,7 @@ public class ManagedIndexSchemaFactory extends IndexSchemaFactory implements Sol
         try {
           exists = zkLoader.getZkController().pathExists(nonManagedSchemaPath);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         } catch (KeeperException e) {
           // log as warning and suppress the exception
           log.warn("Error checking for the existence of the non-managed schema {}", resourceName, e);
@@ -419,7 +419,7 @@ public class ManagedIndexSchemaFactory extends IndexSchemaFactory implements Sol
         log.error(msg, e);
         throw new SolrException(ErrorCode.SERVER_ERROR, msg, e);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       }
     } else {
       this.zkIndexSchemaReader = null;
diff --git a/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java b/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java
index 590ca89..11c2315 100644
--- a/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java
+++ b/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java
@@ -99,7 +99,7 @@ public class ZkIndexSchemaReader implements OnReconnect {
       log.error(msg, e);
       throw new ZooKeeperException(ErrorCode.SERVER_ERROR, msg, e);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
     }
     
     return watcher;
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 22897f7..f8be5c4 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -155,7 +155,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
       dr = core.getIndexReaderFactory().newReader(dir, core);
       return dr;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Error opening Reader", e);
     } finally {
       if (dir != null) {
@@ -2251,7 +2251,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
         }
         return total;
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         return -1;
       }
     }, true, "indexCommitSize", Category.SEARCHER.toString(), scope);
diff --git a/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java b/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java
index 886fca7..9dd48c4 100644
--- a/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java
+++ b/solr/core/src/java/org/apache/solr/search/ValueSourceParser.java
@@ -166,7 +166,7 @@ public abstract class ValueSourceParser implements NamedListInitializedPlugin {
         try {
           Thread.sleep(ms);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new RuntimeException(e);
         }
         return source;
diff --git a/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java b/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java
index a16fcdd..86f8bf1 100644
--- a/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/AuditLoggerPlugin.java
@@ -16,21 +16,6 @@
  */
 package org.apache.solr.security;
 
-import com.codahale.metrics.Counter;
-import com.codahale.metrics.Meter;
-import com.codahale.metrics.Timer;
-import com.fasterxml.jackson.annotation.JsonInclude.Include;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.SerializationFeature;
-import org.apache.solr.common.ParWork;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.core.SolrInfoBean;
-import org.apache.solr.metrics.SolrMetricsContext;
-import org.apache.solr.security.AuditEvent.EventType;
-import org.eclipse.jetty.util.BlockingArrayQueue;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.StringWriter;
@@ -47,6 +32,21 @@ import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.LongAdder;
 
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Meter;
+import com.codahale.metrics.Timer;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+import org.apache.solr.common.ParWork;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.core.SolrInfoBean;
+import org.apache.solr.metrics.SolrMetricsContext;
+import org.apache.solr.security.AuditEvent.EventType;
+import org.eclipse.jetty.util.BlockingArrayQueue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
  * Base class for Audit logger plugins.
  * This interface may change in next release and is marked experimental
@@ -178,7 +178,7 @@ public abstract class AuditLoggerPlugin extends ParWork.NoLimitsCallable impleme
       try {
         queue.put(event);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       }
     } else {
       if (!queue.offer(event)) {
@@ -207,7 +207,7 @@ public abstract class AuditLoggerPlugin extends ParWork.NoLimitsCallable impleme
         numLogged.mark();
         totalTime.inc(timer.stop());
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         return null;
       } catch (Exception ex) {
         log.error("Exception when attempting to audit log asynchronously", ex);
@@ -322,7 +322,7 @@ public abstract class AuditLoggerPlugin extends ParWork.NoLimitsCallable impleme
       try {
         executorService.awaitTermination(10, TimeUnit.SECONDS);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       }
     }
     try {
@@ -349,7 +349,7 @@ public abstract class AuditLoggerPlugin extends ParWork.NoLimitsCallable impleme
           }
           timeSlept ++;
         } catch (InterruptedException ignored) {
-          ParWork.propegateInterrupt(ignored);
+          ParWork.propagateInterrupt(ignored);
           break;
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java b/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java
index 07bf86e..e336ea4 100644
--- a/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java
+++ b/solr/core/src/java/org/apache/solr/security/DelegationTokenKerberosFilter.java
@@ -16,19 +16,18 @@
  */
 package org.apache.solr.security;
 
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.Enumeration;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Locale;
-
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
 import javax.servlet.ServletException;
 import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.Enumeration;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Locale;
 
 import org.apache.curator.RetryPolicy;
 import org.apache.curator.framework.AuthInfo;
@@ -36,7 +35,6 @@ import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.framework.api.ACLProvider;
 import org.apache.curator.retry.ExponentialBackoffRetry;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.server.AuthenticationHandler;
@@ -73,7 +71,7 @@ public class DelegationTokenKerberosFilter extends DelegationTokenAuthentication
         conf.getServletContext().setAttribute("signer.secret.provider.zookeeper.curator.client",
             getCuratorClient(zkClient));
       } catch (InterruptedException | KeeperException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new ServletException(e);
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index f10f846..1ac941f 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -16,6 +16,39 @@
  */
 package org.apache.solr.servlet;
 
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ReadListener;
+import javax.servlet.ServletException;
+import javax.servlet.ServletInputStream;
+import javax.servlet.ServletOutputStream;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.UnavailableException;
+import javax.servlet.WriteListener;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpServletResponseWrapper;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.lang.invoke.MethodHandles;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.time.Instant;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Locale;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
 import com.codahale.metrics.jvm.ClassLoadingGaugeSet;
 import com.codahale.metrics.jvm.GarbageCollectorMetricSet;
 import com.codahale.metrics.jvm.MemoryUsageGaugeSet;
@@ -63,38 +96,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.security.AuditEvent.EventType;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ReadListener;
-import javax.servlet.ServletException;
-import javax.servlet.ServletInputStream;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.UnavailableException;
-import javax.servlet.WriteListener;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpServletResponseWrapper;
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.lang.invoke.MethodHandles;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.time.Instant;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Locale;
-import java.util.Properties;
-import java.util.Set;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 /**
  * This filter looks at the incoming URL maps them to handlers defined in solrconfig.xml
@@ -241,7 +242,7 @@ public class SolrDispatchFilter extends BaseSolrFilter {
       });
       metricManager.registerGauge(null, registryName, sysprops, metricTag, true, "properties", "system");
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.warn("Error registering JVM metrics", e);
     }
   }
@@ -400,7 +401,7 @@ public class SolrDispatchFilter extends BaseSolrFilter {
         try {
           init.await();
         } catch (InterruptedException e) { //well, no wait then
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
         final String msg = "Error processing the request. CoreContainer is either not initialized or shutting down.";
         if (cores == null || cores.isShutDown()) {
diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java b/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java
index 14b33d4..f79715b 100644
--- a/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java
+++ b/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java
@@ -195,7 +195,7 @@ public class Suggester extends SolrSpellChecker {
           IOUtils.closeWhileHandlingException(is);
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.info("load failed, need to build Lookup again");
       }
 
diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
index a4550ec..4707a47 100644
--- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
+++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
@@ -81,7 +81,7 @@ public class HdfsDirectory extends BaseDirectory {
         try {
           Thread.sleep(5000);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           return;
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLockFactory.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLockFactory.java
index 669b01d..c3bdd18 100644
--- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLockFactory.java
+++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLockFactory.java
@@ -77,7 +77,7 @@ public class HdfsLockFactory extends LockFactory {
           try {
             Thread.sleep(5000);
           } catch (InterruptedException e1) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             break;
           }
           continue;
diff --git a/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java b/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
index ae345d4..67700f0 100644
--- a/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
+++ b/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
@@ -16,6 +16,20 @@
  */
 package org.apache.solr.update;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.MergePolicy;
 import org.apache.lucene.search.Sort;
@@ -25,7 +39,6 @@ import org.apache.solr.common.AlreadyClosedException;
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.DirectoryFactory;
@@ -36,20 +49,6 @@ import org.apache.solr.util.RefCounted;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
-import java.util.concurrent.RejectedExecutionException;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
 public final class DefaultSolrCoreState extends SolrCoreState implements RecoveryStrategy.RecoveryListener {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
@@ -116,7 +115,7 @@ public final class DefaultSolrCoreState extends SolrCoreState implements Recover
       }
       indexWriter = null;
     } catch (Exception e) {
-      ParWork.propegateInterrupt("Error during close of writer.", e);
+      ParWork.propagateInterrupt("Error during close of writer.", e);
       throw new SolrException(ErrorCode.SERVER_ERROR, e);
     }
   }
@@ -223,14 +222,14 @@ public final class DefaultSolrCoreState extends SolrCoreState implements Recover
           log.debug("Closing old IndexWriter... core=" + coreName);
           iw.close();
         } catch (Exception e) {
-          ParWork.propegateInterrupt("Error closing old IndexWriter. core=" + coreName, e);
+          ParWork.propagateInterrupt("Error closing old IndexWriter. core=" + coreName, e);
         }
       } else {
         try {
           log.debug("Rollback old IndexWriter... core=" + coreName);
           iw.rollback();
         } catch (Exception e) {
-          ParWork.propegateInterrupt("Error rolling back old IndexWriter. core=" + coreName, e);
+          ParWork.propagateInterrupt("Error rolling back old IndexWriter. core=" + coreName, e);
         }
       }
     }
@@ -286,7 +285,7 @@ public final class DefaultSolrCoreState extends SolrCoreState implements Recover
       iw = SolrIndexWriter.buildIndexWriter(core, name, core.getNewIndexDir(), core.getDirectoryFactory(), false, core.getLatestSchema(),
               core.getSolrConfig().indexConfig, core.getDeletionPolicy(), core.getCodec());
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, e);
     }
 
@@ -403,7 +402,7 @@ public final class DefaultSolrCoreState extends SolrCoreState implements Recover
             log.error("Exception waiting for previous recovery to finish");
           }
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           return;
         }
       }
@@ -447,7 +446,7 @@ public final class DefaultSolrCoreState extends SolrCoreState implements Recover
               recoveryFuture.get(10,
                   TimeUnit.MINUTES); // nocommit - how long? make configurable too
             } catch (InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               throw new SolrException(ErrorCode.SERVER_ERROR, e);
             } catch (ExecutionException e) {
               throw new SolrException(ErrorCode.SERVER_ERROR, e);
diff --git a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
index f2ff4d0..42f1583 100644
--- a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
+++ b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
@@ -749,7 +749,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState
        try {
         waitSearcher[0].get();
       } catch (InterruptedException | ExecutionException e) {
-         ParWork.propegateInterrupt(e);
+         ParWork.propagateInterrupt(e);
       }
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java b/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
index 2fc394b..825afe2 100644
--- a/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
+++ b/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
@@ -151,7 +151,7 @@ public class HdfsUpdateLog extends UpdateLog {
           try {
             Thread.sleep(5000);
           } catch (InterruptedException e1) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             break;
           }
           continue;
diff --git a/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java b/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java
index 4ee36d8..efbdf52 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java
@@ -16,6 +16,18 @@
  */
 package org.apache.solr.update;
 
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.invoke.MethodHandles;
+import java.net.ConnectException;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Phaser;
+
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.BinaryResponseParser;
 import org.apache.solr.client.solrj.impl.Http2SolrClient;
@@ -36,18 +48,6 @@ import org.apache.solr.update.processor.DistributedUpdateProcessor.RollupRequest
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.InputStream;
-import java.lang.invoke.MethodHandles;
-import java.net.ConnectException;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.Phaser;
-
 /**
  * Used for distributing commands from a shard leader to its replicas.
  */
@@ -374,7 +374,7 @@ public class SolrCmdDistributor implements Closeable {
             }
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Failed to parse response from {} during replication factor accounting", node, e);
         }
       }
@@ -535,7 +535,7 @@ public class SolrCmdDistributor implements Closeable {
         leaderProps = new ZkCoreNodeProps(zkStateReader.getLeaderRetry(
             collection, shardId));
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         return false;
       } catch (Exception e) {
         // we retry with same info
diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
index 9cb1050..ad4b875 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
@@ -22,7 +22,6 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -120,7 +119,7 @@ public class SolrIndexWriter extends IndexWriter {
       dir = getDir(directoryFactory, path, config);
       iw = new SolrIndexWriter(core, name, directoryFactory, dir, create, schema, config, delPolicy, codec);
     } catch (Throwable e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       SolrException exp = new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
 
       if (iw != null) {
@@ -238,7 +237,7 @@ public class SolrIndexWriter extends IndexWriter {
     try {
       dir = directoryFactory.get(path,  DirContext.DEFAULT, config.lockType);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       SolrException exp = new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
       if (dir != null) try {
         directoryFactory.release(dir);
@@ -367,7 +366,7 @@ public class SolrIndexWriter extends IndexWriter {
     try {
       super.close();
     } catch (Throwable e) {
-      ParWork.propegateInterrupt("Error closing IndexWriter", e);
+      ParWork.propagateInterrupt("Error closing IndexWriter", e);
     } finally {
       cleanup("close");
     }
@@ -383,7 +382,7 @@ public class SolrIndexWriter extends IndexWriter {
     try {
       super.rollback();
     } catch (Throwable e) {
-      ParWork.propegateInterrupt("Exception rolling back IndexWriter", e);
+      ParWork.propagateInterrupt("Exception rolling back IndexWriter", e);
     } finally {
       cleanup("rollback");
     }
diff --git a/solr/core/src/java/org/apache/solr/update/TimedVersionBucket.java b/solr/core/src/java/org/apache/solr/update/TimedVersionBucket.java
index 3f66c1e..641b9ea 100644
--- a/solr/core/src/java/org/apache/solr/update/TimedVersionBucket.java
+++ b/solr/core/src/java/org/apache/solr/update/TimedVersionBucket.java
@@ -63,7 +63,7 @@ public class TimedVersionBucket extends VersionBucket {
         condition.awaitNanos(nanosTimeout);
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new RuntimeException(e);
     }
   }
@@ -72,7 +72,7 @@ public class TimedVersionBucket extends VersionBucket {
     try {
       return lock.tryLock(lockTimeoutMs, TimeUnit.MILLISECONDS);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new RuntimeException(e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateLog.java b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
index e45f22f..67c71da 100644
--- a/solr/core/src/java/org/apache/solr/update/UpdateLog.java
+++ b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
@@ -16,6 +16,34 @@
  */
 package org.apache.solr.update;
 
+import java.io.Closeable;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutorCompletionService;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.atomic.LongAdder;
+
 import com.codahale.metrics.Gauge;
 import com.codahale.metrics.Meter;
 import org.apache.commons.lang3.concurrent.ConcurrentUtils;
@@ -29,10 +57,8 @@ import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.ObjectReleaseTracker;
 import org.apache.solr.common.util.OrderedExecutor;
-import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoBean;
@@ -55,35 +81,6 @@ import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase.FROMLEADER;
 import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
-import java.io.Closeable;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.nio.charset.Charset;
-import java.nio.file.Files;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Deque;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.ListIterator;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ExecutorCompletionService;
-import java.util.concurrent.Future;
-import java.util.concurrent.SynchronousQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.concurrent.atomic.LongAdder;
 
 /**
  * This holds references to the transaction logs. It also keeps a map of unique key to location in log
@@ -450,7 +447,7 @@ public class UpdateLog implements PluginInfoInitialized, SolrMetricProducer {
       }
       core.getCoreMetricManager().registerMetricProducer(SolrInfoBean.Category.TLOG.toString(), this);
     } catch (Throwable e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       ObjectReleaseTracker.release(this);
       if (e instanceof Error) {
         throw e;
@@ -728,7 +725,7 @@ public class UpdateLog implements PluginInfoInitialized, SolrMetricProducer {
         }
 
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e, true);
+        ParWork.propagateInterrupt(e, true);
         SolrException.log(log, "Error opening realtime searcher", e);
         return null;
       }
@@ -1351,7 +1348,7 @@ public class UpdateLog implements PluginInfoInitialized, SolrMetricProducer {
     } catch (IOException e) {
       log.error("Exception reading versions from log",e);
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
     } finally {
       if (logReader != null) logReader.close();
     }
diff --git a/solr/core/src/java/org/apache/solr/update/VersionBucket.java b/solr/core/src/java/org/apache/solr/update/VersionBucket.java
index 1939aec..056bb01 100644
--- a/solr/core/src/java/org/apache/solr/update/VersionBucket.java
+++ b/solr/core/src/java/org/apache/solr/update/VersionBucket.java
@@ -16,11 +16,11 @@
  */
 package org.apache.solr.update;
 
-import org.apache.solr.common.ParWork;
-
 import java.io.IOException;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.solr.common.ParWork;
+
 // TODO: make inner?
 // TODO: store the highest possible in the index on a commit (but how to not block adds?)
 // TODO: could also store highest possible in the transaction log after a commit.
@@ -70,7 +70,7 @@ public class VersionBucket {
         wait(millis);
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new RuntimeException(e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
index 7289dc4..4879fa9 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
@@ -267,7 +267,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
             }
 
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new SolrException(ErrorCode.SERVER_ERROR, e);
           } finally {
             if (vinfo != null) vinfo.unlockForUpdate();
@@ -286,7 +286,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
             doDistribAdd(finalCloneCmd);
             if (log.isDebugEnabled()) log.debug("after distrib add collection");
           } catch (Throwable e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new SolrException(ErrorCode.SERVER_ERROR, e);
           }
         });
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java
index 25429f4..6167789 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java
@@ -28,8 +28,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.ReentrantLock;
 
@@ -195,7 +193,7 @@ public class DistributedZkUpdateProcessor extends DistributedUpdateProcessor {
         try {
           leaderReplica = zkController.getZkStateReader().getLeaderRetry(collection, cloudDesc.getShardId());
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(ErrorCode.SERVER_ERROR,
               "Exception finding leader for shard " + cloudDesc.getShardId(), e);
 
@@ -490,7 +488,7 @@ public class DistributedZkUpdateProcessor extends DistributedUpdateProcessor {
         try {
           leader = zkController.getZkStateReader().getLeaderRetry(collection, sliceName);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, "Exception finding leader for shard " + sliceName, e);
         }
 
@@ -576,7 +574,7 @@ public class DistributedZkUpdateProcessor extends DistributedUpdateProcessor {
         }
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "Interrupted", e);
     }
     if (leaderLogic) {
@@ -622,7 +620,7 @@ public class DistributedZkUpdateProcessor extends DistributedUpdateProcessor {
         }
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "", e);
     }
 
@@ -650,7 +648,7 @@ public class DistributedZkUpdateProcessor extends DistributedUpdateProcessor {
     try {
       return zkController.getZkStateReader().getLeaderRetry(collection, cloudDesc.getShardId()).getCoreUrl();
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Exception during fetching from leader.", e);
     }
   }
@@ -794,7 +792,7 @@ public class DistributedZkUpdateProcessor extends DistributedUpdateProcessor {
       }
 
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "", e);
     }
   }
@@ -1043,7 +1041,7 @@ public class DistributedZkUpdateProcessor extends DistributedUpdateProcessor {
                     }
                   }
                 } catch (InterruptedException e) {
-                  ParWork.propegateInterrupt(e);
+                  ParWork.propagateInterrupt(e);
                 }
               }
             }
diff --git a/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java b/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java
index 01c7d84..d7f92d5 100644
--- a/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java
+++ b/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java
@@ -15,13 +15,8 @@
  * limitations under the License.
  */
 package org.apache.solr.util;
-import org.apache.lucene.util.Accountable;
-import org.apache.lucene.util.PriorityQueue;
-import org.apache.lucene.util.RamUsageEstimator;
-import org.apache.solr.common.ParWork;
-import org.apache.solr.common.util.Cache;
-import org.apache.solr.common.util.TimeSource;
 
+import java.lang.ref.WeakReference;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -33,14 +28,19 @@ import java.util.Map;
 import java.util.TreeSet;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
-//import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.LongAdder;
 import java.util.concurrent.locks.ReentrantLock;
-import java.lang.ref.WeakReference;
 import java.util.function.Function;
 
+import org.apache.lucene.util.Accountable;
+import org.apache.lucene.util.PriorityQueue;
+import org.apache.lucene.util.RamUsageEstimator;
+import org.apache.solr.common.ParWork;
+import org.apache.solr.common.util.Cache;
+import org.apache.solr.common.util.TimeSource;
+
 import static org.apache.lucene.util.RamUsageEstimator.HASHTABLE_RAM_BYTES_PER_ENTRY;
 import static org.apache.lucene.util.RamUsageEstimator.QUERY_DEFAULT_RAM_BYTES_USED;
 
@@ -879,7 +879,7 @@ public class ConcurrentLRUCache<K,V> implements Cache<K,V>, Accountable {
           try {
             this.wait(waitTimeMs);
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
           }
         }
         if (stop) break;
diff --git a/solr/core/src/java/org/apache/solr/util/CryptoKeys.java b/solr/core/src/java/org/apache/solr/util/CryptoKeys.java
index d452b71..09ca3ad 100644
--- a/solr/core/src/java/org/apache/solr/util/CryptoKeys.java
+++ b/solr/core/src/java/org/apache/solr/util/CryptoKeys.java
@@ -81,7 +81,7 @@ public final class CryptoKeys {
         log.debug("verified {} ", verified);
         if (verified) return entry.getKey();
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         exception = e;
         log.debug("NOT verified  ");
       }
@@ -100,7 +100,7 @@ public final class CryptoKeys {
         log.debug("verified {} ", verified);
         if (verified) return entry.getKey();
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         exception = e;
         log.debug("NOT verified  ");
       }
@@ -244,7 +244,7 @@ public final class CryptoKeys {
       try {
         return decodeAES(base64CipherTxt, pwd, strength);
       } catch (Exception exp) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         e = exp;
       }
     }
@@ -314,7 +314,7 @@ public final class CryptoKeys {
       X509EncodedKeySpec publicKeySpec = new X509EncodedKeySpec(Base64.base64ToByteArray(pubKey));
       return keyFactory.generatePublic(publicKeySpec);
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,e);
     }
   }
@@ -324,7 +324,7 @@ public final class CryptoKeys {
     try {
       rsaCipher = Cipher.getInstance("RSA/ECB/nopadding");
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,e);
     }
     rsaCipher.init(Cipher.DECRYPT_MODE, pubKey);
@@ -400,7 +400,7 @@ public final class CryptoKeys {
         rsaCipher.init(Cipher.ENCRYPT_MODE, privateKey);
         return rsaCipher.doFinal(buffer.array(),buffer.position(), buffer.limit());
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,e);
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/util/ExportTool.java b/solr/core/src/java/org/apache/solr/util/ExportTool.java
index 28071c8..3a1b223 100644
--- a/solr/core/src/java/org/apache/solr/util/ExportTool.java
+++ b/solr/core/src/java/org/apache/solr/util/ExportTool.java
@@ -32,7 +32,6 @@ import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Date;
-import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -166,7 +165,7 @@ public class ExportTool extends SolrCLI.ToolBase {
           try {
             sink.accept(doc);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             throw new RuntimeException(e);
           }
         }
@@ -421,7 +420,7 @@ public class ExportTool extends SolrCLI.ToolBase {
             try {
               coreHandler.exportDocsFromCore();
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               if (output != null) output.println("Error exporting docs from : " + s);
 
             }
@@ -471,7 +470,7 @@ public class ExportTool extends SolrCLI.ToolBase {
           try {
             doc = queue.poll(30, TimeUnit.SECONDS);
           } catch (InterruptedException e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             if (output != null) output.println("Consumer interrupted");
             failed = true;
             break;
@@ -481,7 +480,7 @@ public class ExportTool extends SolrCLI.ToolBase {
             if (docsWritten.get() > limit) continue;
             sink.accept(doc);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             if (output != null) output.println("Failed to write to file " + e.getMessage());
             failed = true;
           }
@@ -518,7 +517,7 @@ public class ExportTool extends SolrCLI.ToolBase {
               queue.offer(doc, 10, TimeUnit.SECONDS);
               receivedDocs.incrementAndGet();
             } catch (InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               failed = true;
               if (output != null) output.println("Failed to write docs from" + e.getMessage());
             }
diff --git a/solr/core/src/java/org/apache/solr/util/FSHDFSUtils.java b/solr/core/src/java/org/apache/solr/util/FSHDFSUtils.java
index 591fcdd..b00b20c 100644
--- a/solr/core/src/java/org/apache/solr/util/FSHDFSUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/FSHDFSUtils.java
@@ -121,7 +121,7 @@ public class FSHDFSUtils {
           }
         }
       } catch (InterruptedException ie) {
-        ParWork.propegateInterrupt(ie);
+        ParWork.propagateInterrupt(ie);
         InterruptedIOException iioe = new InterruptedIOException();
         iioe.initCause(ie);
         throw iioe;
diff --git a/solr/core/src/java/org/apache/solr/util/FileUtils.java b/solr/core/src/java/org/apache/solr/util/FileUtils.java
index 4e649c6..e6956f4 100644
--- a/solr/core/src/java/org/apache/solr/util/FileUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/FileUtils.java
@@ -81,7 +81,7 @@ public class FileUtils {
           // Pause 5 msec
           Thread.sleep(5);
         } catch (InterruptedException ie) {
-          ParWork.propegateInterrupt(ie);
+          ParWork.propagateInterrupt(ie);
           break;
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/util/PackageTool.java b/solr/core/src/java/org/apache/solr/util/PackageTool.java
index a9bc092..5adeccd 100644
--- a/solr/core/src/java/org/apache/solr/util/PackageTool.java
+++ b/solr/core/src/java/org/apache/solr/util/PackageTool.java
@@ -16,9 +16,6 @@
  */
 package org.apache.solr.util;
 
-import static org.apache.solr.packagemanager.PackageUtils.printGreen;
-import static org.apache.solr.packagemanager.PackageUtils.print;
-
 import java.io.File;
 import java.lang.invoke.MethodHandles;
 import java.nio.file.Paths;
@@ -48,6 +45,9 @@ import org.apache.solr.util.SolrCLI.StatusTool;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.packagemanager.PackageUtils.print;
+import static org.apache.solr.packagemanager.PackageUtils.printGreen;
+
 public class PackageTool extends SolrCLI.ToolBase {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -206,7 +206,7 @@ public class PackageTool extends SolrCLI.ToolBase {
       log.info("Finished: {}", cmd);
 
     } catch (Exception ex) {
-      ParWork.propegateInterrupt(ex);
+      ParWork.propagateInterrupt(ex);
       ex.printStackTrace(); // We need to print this since SolrCLI drops the stack trace in favour of brevity. Package tool should surely print full stacktraces!
       throw ex;
     }
diff --git a/solr/core/src/java/org/apache/solr/util/SimplePostTool.java b/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
index 5998b7c..c9e09f2 100644
--- a/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
+++ b/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
@@ -23,7 +23,6 @@ import javax.xml.xpath.XPath;
 import javax.xml.xpath.XPathConstants;
 import javax.xml.xpath.XPathExpression;
 import javax.xml.xpath.XPathExpressionException;
-import javax.xml.xpath.XPathFactory;
 import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -65,7 +64,6 @@ import java.util.zip.InflaterInputStream;
 
 import org.apache.solr.common.ParWork;
 import org.apache.solr.core.XmlConfigFile;
-import org.apache.solr.schema.IndexSchema;
 import org.w3c.dom.Document;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
@@ -903,7 +901,7 @@ public class SimplePostTool {
     } catch (IOException e) {
       warn("An error occurred getting data from "+url+". Please check that Solr is running.");
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       warn("An error occurred getting data from "+url+". Message: " + e.getMessage());
     }
   }
@@ -943,7 +941,7 @@ public class SimplePostTool {
         fatal("Connection error (is Solr running at " + solrUrl + " ?): " + e);
         success = false;
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         fatal("POST failed with error " + e.getMessage());
       }
 
@@ -1280,7 +1278,7 @@ public class SimplePostTool {
       } catch (IOException e) {
         warn("IOException opening URL "+url+": "+e.getMessage());
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new RuntimeException(e);
       }
       return l;
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 94ca548..be4fe5a 100755
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -197,7 +197,7 @@ public class SolrCLI implements CLIO {
       try {
         runImpl(cli);
       } catch (Exception exc) {
-        ParWork.propegateInterrupt(exc);
+        ParWork.propagateInterrupt(exc);
         // since this is a CLI, spare the user the stacktrace
         String excMsg = exc.getMessage();
         if (excMsg != null) {
@@ -559,7 +559,7 @@ public class SolrCLI implements CLIO {
           toolClasses.add((Class<Tool>) theClass);
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       // safe to squelch this as it's just looking for tools to run
       log.debug("Failed to find Tool impl classes in {}, due to: ", packageName, e);
     }
@@ -638,7 +638,7 @@ public class SolrCLI implements CLIO {
       try {
         HttpClientUtil.close(httpClient);
       } catch (Exception exc) {
-        ParWork.propegateInterrupt(exc);
+        ParWork.propagateInterrupt(exc);
         // safe to ignore, we're just shutting things down
       }
     }
@@ -677,7 +677,7 @@ public class SolrCLI implements CLIO {
       try {
         json = getJson(httpClient, getUrl);
       } catch (Exception exc) {
-        ParWork.propegateInterrupt(exc);
+        ParWork.propagateInterrupt(exc);
         if (exceptionIsAuthRelated(exc)) {
           throw exc;
         }
@@ -1045,7 +1045,7 @@ public class SolrCLI implements CLIO {
         try {
           iterations = Integer.parseInt(iterStr);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Invalid option 'i' value, using default 10:", e);
           iterations = 10;
         }
@@ -1198,7 +1198,7 @@ public class SolrCLI implements CLIO {
           try {
             simCloudManager.request(operation);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             CLIO.err("Aborting - error executing suggestion " + suggestion + ": " + e);
             Map<String, Object> error = new HashMap<>();
             error.put("suggestion", suggestion);
@@ -1288,7 +1288,7 @@ public class SolrCLI implements CLIO {
           new JSONWriter(arr, 2).write(getStatus(solrUrl));
           echo(arr.toString());
         } catch (Exception exc) {
-          ParWork.propegateInterrupt(exc);
+          ParWork.propagateInterrupt(exc);
           if (exceptionIsAuthRelated(exc)) {
             throw exc;
           }
@@ -1310,14 +1310,14 @@ public class SolrCLI implements CLIO {
         } catch (SSLPeerUnverifiedException exc) {
           throw exc;
         } catch (Exception exc) {
-          ParWork.propegateInterrupt(exc);
+          ParWork.propagateInterrupt(exc);
           if (exceptionIsAuthRelated(exc)) {
             throw exc;
           }
           try {
             Thread.sleep(2000L);
           } catch (InterruptedException interrupted) {
-            ParWork.propegateInterrupt(interrupted);
+            ParWork.propagateInterrupt(interrupted);
             break;
           }
         }
@@ -1619,7 +1619,7 @@ public class SolrCLI implements CLIO {
       try {
         docCount = qr.getResults().getNumFound();
       } catch (Exception exc) {
-        ParWork.propegateInterrupt(exc);
+        ParWork.propagateInterrupt(exc);
         collErr = String.valueOf(exc);
       }
 
@@ -1633,7 +1633,7 @@ public class SolrCLI implements CLIO {
         try {
           leaderUrl = zkStateReader.getLeaderUrl(collection, shardName, 1000);
         } catch (Exception exc) {
-          ParWork.propegateInterrupt(exc);
+          ParWork.propagateInterrupt(exc);
           log.warn("Failed to get leader for shard {} due to: {}", shardName, exc);
         }
 
@@ -1676,7 +1676,7 @@ public class SolrCLI implements CLIO {
               // if we get here, we can trust the state
               replicaStatus = replicaCoreProps.getState();
             } catch (Exception exc) {
-              ParWork.propegateInterrupt(exc);
+              ParWork.propagateInterrupt(exc);
               log.error("ERROR: {} when trying to reach: {}", exc, coreUrl);
 
               if (checkCommunicationError(exc)) {
@@ -1851,7 +1851,7 @@ public class SolrCLI implements CLIO {
       List<String> collections = (List<String>) existsCheckResult.get("collections");
       exists = collections != null && collections.contains(collection);
     } catch (Exception exc) {
-      ParWork.propegateInterrupt(exc);
+      ParWork.propagateInterrupt(exc);
       // just ignore it since we're only interested in a positive result here
     }
     return exists;
@@ -1881,7 +1881,7 @@ public class SolrCLI implements CLIO {
       }while (wait &&
           System.nanoTime() - startWaitAt < MAX_WAIT_FOR_CORE_LOAD_NANOS);
     } catch (Exception exc) {
-      ParWork.propegateInterrupt(exc);
+      ParWork.propagateInterrupt(exc);
       // just ignore it since we're only interested in a positive result here
     }
     return exists;
@@ -2155,7 +2155,7 @@ public class SolrCLI implements CLIO {
           echo(String.format(Locale.ROOT, "\nCreated new core '%s'", coreName));
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         /* create-core failed, cleanup the copied configset before propagating the error. */
         FileUtils.deleteDirectory(coreInstanceDir);
         throw e;
@@ -2268,7 +2268,7 @@ public class SolrCLI implements CLIO {
 
         zkClient.upConfig(confPath, confName);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Could not complete upconfig operation for reason: {}", e.getMessage());
         throw (e);
       }
@@ -2343,7 +2343,7 @@ public class SolrCLI implements CLIO {
 
         zkClient.downConfig(confName, configSetPath);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Could not complete downconfig operation for reason: {}", e.getMessage());
         throw (e);
       }
@@ -2421,7 +2421,7 @@ public class SolrCLI implements CLIO {
             " recurse: " + Boolean.toString(recurse));
         zkClient.clean(znode);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Could not complete rm operation for reason: {}", e.getMessage());
         throw (e);
       }
@@ -2491,7 +2491,7 @@ public class SolrCLI implements CLIO {
             " recurse: " + Boolean.toString(recurse), cli);
         stdout.print(zkClient.listZnode(znode, recurse));
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Could not complete ls operation for reason: {}", e.getMessage());
         throw (e);
       }
@@ -2552,7 +2552,7 @@ public class SolrCLI implements CLIO {
         echo("Creating Zookeeper path " + znode + " on ZooKeeper at " + zkHost);
         zkClient.mkdir(znode);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Could not complete mkroot operation for reason: {}", e.getMessage());
         throw (e);
       }
@@ -2645,7 +2645,7 @@ public class SolrCLI implements CLIO {
         }
         zkClient.zkTransfer(srcName, srcIsZk, dstName, dstIsZk, recurse);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Could not complete the zk operation for reason: {}", e.getMessage());
         throw (e);
       }
@@ -2725,7 +2725,7 @@ public class SolrCLI implements CLIO {
         echo("Moving Znode " + source + " to " + dest + " on ZooKeeper at " + zkHost);
         zkClient.moveZnode(source, dest);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         log.error("Could not complete mv operation for reason: {}", e.getMessage());
         throw (e);
       }
@@ -2874,7 +2874,7 @@ public class SolrCLI implements CLIO {
         try {
           zkStateReader.getZkClient().clean(configZnode);
         } catch (Exception exc) {
-          ParWork.propegateInterrupt(exc);
+          ParWork.propagateInterrupt(exc);
           echo("\nWARNING: Failed to delete configuration directory "+configZnode+" in ZooKeeper due to: "+
               exc.getMessage()+"\nYou'll need to manually delete this znode using the zkcli script.");
         }
@@ -3409,7 +3409,7 @@ public class SolrCLI implements CLIO {
       try {
         configTool.runTool(processCommandLineArgs(joinCommonAndToolOptions(configTool.getOptions()), configArgs));
       } catch (Exception exc) {
-        ParWork.propegateInterrupt(exc);
+        ParWork.propagateInterrupt(exc);
         CLIO.err("Failed to update '"+propName+"' property due to: "+exc);
       }
     }
@@ -3429,7 +3429,7 @@ public class SolrCLI implements CLIO {
           try {
             Thread.sleep(2000);
           } catch (InterruptedException ie) {
-            ParWork.propegateInterrupt(ie);
+            ParWork.propagateInterrupt(ie);
             return;
           }
           liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes();
@@ -3441,14 +3441,14 @@ public class SolrCLI implements CLIO {
               " seconds! Please check the solr.log for each node to look for errors.\n");
         }
       } catch (Exception exc) {
-        ParWork.propegateInterrupt(exc);
+        ParWork.propagateInterrupt(exc);
         CLIO.err("Failed to see if "+numNodes+" joined the SolrCloud cluster due to: "+exc);
       } finally {
         if (cloudClient != null) {
           try {
             cloudClient.close();
           } catch (Exception ignore) {
-            ParWork.propegateInterrupt(ignore);
+            ParWork.propagateInterrupt(ignore);
           }
         }
       }
@@ -3528,7 +3528,7 @@ public class SolrCLI implements CLIO {
         try {
           handler.waitFor(3000);
         } catch (InterruptedException ie) {
-          ParWork.propegateInterrupt(ie);
+          ParWork.propagateInterrupt(ie);
         }
         if (handler.hasResult() && handler.getExitValue() != 0) {
           throw new Exception("Failed to start Solr using command: "+startCmd+" Exception : "+handler.getException());
@@ -3556,7 +3556,7 @@ public class SolrCLI implements CLIO {
         nodeStatus = (new StatusTool()).getStatus(solrUrl);
       } catch (Exception ignore) {
         /* just trying to determine if this example is already running. */
-        ParWork.propegateInterrupt(ignore);
+        ParWork.propagateInterrupt(ignore);
       }
 
       if (nodeStatus != null) {
@@ -3919,7 +3919,7 @@ public class SolrCLI implements CLIO {
       try {
         toolExitStatus = runAssert(cli);
       } catch (Exception exc) {
-        ParWork.propegateInterrupt(exc);
+        ParWork.propagateInterrupt(exc);
         // since this is a CLI, spare the user the stacktrace
         String excMsg = exc.getMessage();
         if (excMsg != null) {
@@ -3998,7 +3998,7 @@ public class SolrCLI implements CLIO {
       try {
         status.waitToSeeSolrUp(url, timeoutMs.orElse(1000L).intValue() / 1000);
       } catch (Exception se) {
-        ParWork.propegateInterrupt(se);
+        ParWork.propagateInterrupt(se);
         if (exceptionIsAuthRelated(se)) {
           throw se;
         }
@@ -4025,11 +4025,11 @@ public class SolrCLI implements CLIO {
             log.debug("Solr still up. Waiting before trying again to see if it was stopped");
             Thread.sleep(1000L);
           } catch (InterruptedException interrupted) {
-            ParWork.propegateInterrupt(interrupted);
+            ParWork.propagateInterrupt(interrupted);
             timeout = 0; // stop looping
           }
         } catch (Exception se) {
-          ParWork.propegateInterrupt(se);
+          ParWork.propagateInterrupt(se);
           if (exceptionIsAuthRelated(se)) {
             throw se;
           }
@@ -4128,7 +4128,7 @@ public class SolrCLI implements CLIO {
         status.waitToSeeSolrUp(url, timeoutMs.orElse(1000L).intValue() / 1000);
         return true;
       } catch (Exception se) {
-        ParWork.propegateInterrupt(se);
+        ParWork.propagateInterrupt(se);
         if (exceptionIsAuthRelated(se)) {
           throw se;
         }
@@ -4142,7 +4142,7 @@ public class SolrCLI implements CLIO {
         final CollectionAdminResponse response = request.process(client);
         return response != null;
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         if (exceptionIsAuthRelated(e)) {
           throw e;
         }
@@ -4283,7 +4283,7 @@ public class SolrCLI implements CLIO {
             try {
               zkHost = getZkHost(cli);
             } catch (Exception ex) {
-              ParWork.propegateInterrupt(ex);
+              ParWork.propagateInterrupt(ex);
               CLIO.out("Unable to access ZooKeeper. Please add the following security.json to ZooKeeper (in case of SolrCloud):\n"
                   + securityJson + "\n");
               zkInaccessible = true;
@@ -4308,7 +4308,7 @@ public class SolrCLI implements CLIO {
                   }
                 }
               } catch (Exception ex) {
-                ParWork.propegateInterrupt(ex);
+                ParWork.propagateInterrupt(ex);
                 if (zkInaccessible == false) {
                   CLIO.out("Unable to access ZooKeeper. Please add the following security.json to ZooKeeper (in case of SolrCloud):\n"
                       + securityJson + "\n");
@@ -4325,7 +4325,7 @@ public class SolrCLI implements CLIO {
                 zkClient.start();
                 zkClient.setData("/security.json", securityJson.getBytes(StandardCharsets.UTF_8), true);
               } catch (Exception ex) {
-                ParWork.propegateInterrupt(ex);
+                ParWork.propagateInterrupt(ex);
                 if (zkInaccessible == false) {
                   CLIO.out("Unable to access ZooKeeper. Please add the following security.json to ZooKeeper (in case of SolrCloud):\n"
                       + securityJson);
@@ -4414,7 +4414,7 @@ public class SolrCLI implements CLIO {
             try {
               zkHost = getZkHost(cli);
             } catch (Exception ex) {
-              ParWork.propegateInterrupt(ex);
+              ParWork.propagateInterrupt(ex);
               if (cli.hasOption("zkHost")) {
                 CLIO.out("Couldn't get ZooKeeper host. Please make sure that ZooKeeper is running and the correct zkHost has been passed in.");
               } else {
diff --git a/solr/core/src/java/org/apache/solr/util/SolrLogPostTool.java b/solr/core/src/java/org/apache/solr/util/SolrLogPostTool.java
index 54ca028..68816d4 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrLogPostTool.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrLogPostTool.java
@@ -16,21 +16,26 @@
  */
 package org.apache.solr.util;
 
-import java.io.*;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.net.URLDecoder;
 import java.nio.charset.Charset;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.ArrayList;
-import java.net.URLDecoder;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.UUID;
-import java.util.regex.Pattern;
 import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
+import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
-import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrInputDocument;
@@ -137,7 +142,7 @@ public class SolrLogPostTool {
         client.commit();
         CLIO.out("Committed");
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         CLIO.err("Unable to commit documents: " + e.getMessage());
         e.printStackTrace(CLIO.getErrStream());
       }
diff --git a/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java b/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
index cda1bab..20d212d 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
@@ -34,6 +34,7 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.regex.Pattern;
 
+import com.google.common.collect.ImmutableMap;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanClause.Occur;
@@ -78,8 +79,6 @@ import org.apache.solr.search.SyntaxError;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.ImmutableMap;
-
 import static java.util.Collections.singletonList;
 import static org.apache.solr.core.PluginInfo.APPENDS;
 import static org.apache.solr.core.PluginInfo.DEFAULTS;
@@ -939,7 +938,7 @@ public class SolrPluginUtils {
         try {
           return super.getFieldQuery(field, queryText, quoted, raw);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           return null;
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/util/SpatialUtils.java b/solr/core/src/java/org/apache/solr/util/SpatialUtils.java
index 523d010..45cfd68 100644
--- a/solr/core/src/java/org/apache/solr/util/SpatialUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/SpatialUtils.java
@@ -19,12 +19,12 @@ package org.apache.solr.util;
 import java.text.ParseException;
 
 import org.apache.solr.common.ParWork;
+import org.apache.solr.common.SolrException;
 import org.locationtech.spatial4j.context.SpatialContext;
 import org.locationtech.spatial4j.exception.InvalidShapeException;
 import org.locationtech.spatial4j.shape.Point;
 import org.locationtech.spatial4j.shape.Rectangle;
 import org.locationtech.spatial4j.shape.Shape;
-import org.apache.solr.common.SolrException;
 
 /** Utility methods pertaining to spatial. */
 public class SpatialUtils {
@@ -89,7 +89,7 @@ public class SpatialUtils {
     } catch (InvalidShapeException e) {
       throw e;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new InvalidShapeException(e.toString(), e);
     }
   }
@@ -137,7 +137,7 @@ public class SpatialUtils {
     } catch (InvalidShapeException e) {
       throw e;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new InvalidShapeException(e.toString(), e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/util/StartupLoggingUtils.java b/solr/core/src/java/org/apache/solr/util/StartupLoggingUtils.java
index 4069d20..7eb079b 100644
--- a/solr/core/src/java/org/apache/solr/util/StartupLoggingUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/StartupLoggingUtils.java
@@ -82,7 +82,7 @@ public final class StartupLoggingUtils {
       });
       return true;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       logNotSupported("Could not mute logging to console.");
       return false;
     }
@@ -108,7 +108,7 @@ public final class StartupLoggingUtils {
       ctx.updateLoggers();
       return true;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       logNotSupported("Could not change log level.");
       return false;
     }
@@ -121,7 +121,7 @@ public final class StartupLoggingUtils {
       // Make sure that log4j is really selected as logger in slf4j - we could have LogManager in the bridge class :)
       return binder.getLoggerFactoryClassStr().contains("Log4jLoggerFactory");
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e, true);
+      ParWork.propagateInterrupt(e, true);
       return false;
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/util/TestInjection.java b/solr/core/src/java/org/apache/solr/util/TestInjection.java
index a4a3148..c625046 100644
--- a/solr/core/src/java/org/apache/solr/util/TestInjection.java
+++ b/solr/core/src/java/org/apache/solr/util/TestInjection.java
@@ -98,7 +98,7 @@ public class TestInjection {
         Method randomMethod = LUCENE_TEST_CASE.getMethod("random");
         return (Random) randomMethod.invoke(null);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new IllegalStateException("Unable to use reflection to invoke LuceneTestCase.random()", e);
       }
     }
@@ -240,7 +240,7 @@ public class TestInjection {
         try {
           Thread.sleep(delay * 1000);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
       }
     }
@@ -274,7 +274,7 @@ public class TestInjection {
                 Random taskRand = random();
                 Thread.sleep(taskRand.nextInt(1000));
               } catch (InterruptedException e) {
-                ParWork.propegateInterrupt(e);
+                ParWork.propagateInterrupt(e);
               }
               
               cthread.interrupt();
@@ -367,7 +367,7 @@ public class TestInjection {
         try {
           Thread.sleep(rndTime);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
       }
     }
@@ -395,7 +395,7 @@ public class TestInjection {
         try {
           Thread.sleep(rndTime);
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
       }
     }
@@ -419,7 +419,7 @@ public class TestInjection {
         try {
           notifyPauseForeverDone.await();
         } catch (InterruptedException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
       } else {
         countPrepRecoveryOpPauseForever.set(0);
@@ -459,7 +459,7 @@ public class TestInjection {
         log.info("Waiting in ReplicaMutator for up to 60s");
         return splitLatch.await(60, TimeUnit.SECONDS);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       }
     }
     return true;
@@ -471,7 +471,7 @@ public class TestInjection {
         log.info("Waiting in DirectUpdateHandler2 for up to 60s");
         return directUpdateLatch.await(60, TimeUnit.SECONDS);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       }
     }
     return true;
@@ -500,7 +500,7 @@ public class TestInjection {
         log.info("Waiting in ReindexCollectionCmd for up to 60s");
         return reindexLatch.await(60, TimeUnit.SECONDS);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       }
     }
     return true;
@@ -526,7 +526,7 @@ public class TestInjection {
         log.info("Pausing IndexFetcher for {}ms", delayBeforeSlaveCommitRefresh);
         Thread.sleep(delayBeforeSlaveCommitRefresh);
       } catch (InterruptedException e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
       }
     }
     return true;
diff --git a/solr/core/src/java/org/apache/solr/util/VersionedFile.java b/solr/core/src/java/org/apache/solr/util/VersionedFile.java
index 3c1896c..de2f54b 100644
--- a/solr/core/src/java/org/apache/solr/util/VersionedFile.java
+++ b/solr/core/src/java/org/apache/solr/util/VersionedFile.java
@@ -16,8 +16,6 @@
  */
 package org.apache.solr.util;
 
-import org.apache.solr.common.ParWork;
-
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
@@ -32,6 +30,8 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.solr.common.ParWork;
+
 
 /**
  * 
@@ -74,7 +74,7 @@ public class VersionedFile
 
         is = new FileInputStream(f);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         // swallow exception for now
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/util/plugin/AbstractPluginLoader.java b/solr/core/src/java/org/apache/solr/util/plugin/AbstractPluginLoader.java
index e451515..34e6bc1 100644
--- a/solr/core/src/java/org/apache/solr/util/plugin/AbstractPluginLoader.java
+++ b/solr/core/src/java/org/apache/solr/util/plugin/AbstractPluginLoader.java
@@ -16,29 +16,24 @@
  */
 package org.apache.solr.util.plugin;
 
+import javax.xml.xpath.XPath;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Objects;
 
-import net.sf.saxon.dom.DOMNodeList;
-import net.sf.saxon.dom.DocumentOverNodeInfo;
-import net.sf.saxon.om.NodeInfo;
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.core.XmlConfigFile;
-import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.util.DOMUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.w3c.dom.Document;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 
 import static org.apache.solr.common.params.CommonParams.NAME;
-import javax.xml.xpath.XPath;
 
 /**
  * An abstract super class that manages standard solr-style plugin configuration.
@@ -190,7 +185,7 @@ public abstract class AbstractPluginLoader<T>
           }
         }
         catch (Exception ex) {
-          ParWork.propegateInterrupt(ex);
+          ParWork.propagateInterrupt(ex);
           SolrException e = new SolrException
             (ErrorCode.SERVER_ERROR,
              "Plugin init failure for " + type + 
@@ -261,7 +256,7 @@ public abstract class AbstractPluginLoader<T>
       }
 
     } catch (Exception ex) {
-      ParWork.propegateInterrupt(ex);
+      ParWork.propagateInterrupt(ex);
       SolrException e = new SolrException
         (ErrorCode.SERVER_ERROR, "Plugin init failure for " + type, ex);
       throw e;
@@ -272,7 +267,7 @@ public abstract class AbstractPluginLoader<T>
       try {
         init(pinfo.plugin, pinfo.node);
       } catch (Exception ex) {
-        ParWork.propegateInterrupt(ex);
+        ParWork.propagateInterrupt(ex);
         SolrException e = new SolrException
           (ErrorCode.SERVER_ERROR, "Plugin init failure for " + type, ex);
         throw e;
diff --git a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java
index 3a64bbb6..3fd47ae 100644
--- a/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java
+++ b/solr/core/src/java/org/apache/solr/util/stats/InstrumentedPoolingHttpClientConnectionManager.java
@@ -62,7 +62,7 @@ public class InstrumentedPoolingHttpClientConnectionManager extends PoolingHttpC
     try {
       SolrMetricProducer.super.close();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new RuntimeException("Exception closing.", e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
index f45f7c4..f06e94d 100644
--- a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
@@ -600,7 +600,7 @@ public class MetricUtils {
           String metricName = MetricRegistry.name(prefix, name);
           consumer.accept(metricName, gauge);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           // didn't work, skip it...
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/util/xslt/TransformerProvider.java b/solr/core/src/java/org/apache/solr/util/xslt/TransformerProvider.java
index a10a0f3..8731567 100644
--- a/solr/core/src/java/org/apache/solr/util/xslt/TransformerProvider.java
+++ b/solr/core/src/java/org/apache/solr/util/xslt/TransformerProvider.java
@@ -16,6 +16,15 @@
  */
 package org.apache.solr.util.xslt;
 
+import javax.xml.transform.Templates;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerConfigurationException;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.stream.StreamSource;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.concurrent.TimeUnit;
+
 import net.sf.saxon.BasicTransformerFactory;
 import org.apache.commons.io.IOUtils;
 import org.apache.lucene.analysis.util.ResourceLoader;
@@ -23,21 +32,11 @@ import org.apache.solr.common.ParWork;
 import org.apache.solr.common.util.TimeSource;
 import org.apache.solr.common.util.XMLErrorLogger;
 import org.apache.solr.core.SolrConfig;
-import org.apache.solr.core.XmlConfigFile;
 import org.apache.solr.util.SystemIdResolver;
 import org.apache.solr.util.TimeOut;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.xml.transform.Templates;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerConfigurationException;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.stream.StreamSource;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.concurrent.TimeUnit;
-
 /** Singleton that creates a Transformer for the XSLTServletFilter.
  *  For now, only caches the last created Transformer, but
  *  could evolve to use an LRU cache of Transformers.
@@ -115,7 +114,7 @@ public class TransformerProvider {
         IOUtils.closeQuietly(src.getInputStream());
       }
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.error(getClass().getName(), "newTemplates", e);
       throw new IOException("Unable to initialize Templates '" + filename + "'", e);
     }
diff --git a/solr/core/src/test/org/apache/solr/client/solrj/impl/ConnectionReuseTest.java b/solr/core/src/test/org/apache/solr/client/solrj/impl/ConnectionReuseTest.java
index 0e55554..31afcfc 100644
--- a/solr/core/src/test/org/apache/solr/client/solrj/impl/ConnectionReuseTest.java
+++ b/solr/core/src/test/org/apache/solr/client/solrj/impl/ConnectionReuseTest.java
@@ -16,6 +16,11 @@
  */
 package org.apache.solr.client.solrj.impl;
 
+import java.io.IOException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
 import org.apache.http.HttpClientConnection;
 import org.apache.http.HttpConnectionMetrics;
 import org.apache.http.HttpException;
@@ -42,11 +47,6 @@ import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import java.io.IOException;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-
 @SolrTestCase.SuppressSSL
 @Ignore // nocommit look at this again later
 public class ConnectionReuseTest extends SolrCloudTestCase {
@@ -113,7 +113,7 @@ public class ConnectionReuseTest extends SolrCloudTestCase {
           try {
             client.add(c.solrDoc);
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             e.printStackTrace();
           }
           if (!done && i > 0 && i < cnt2 - 1 && client instanceof ConcurrentUpdateSolrClient
@@ -126,7 +126,7 @@ public class ConnectionReuseTest extends SolrCloudTestCase {
           try {
             ((ConcurrentUpdateSolrClient) client).blockUntilFinished();
           } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             e.printStackTrace();
           }
         }
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
index ac0ba3c..711f769 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.cloud;
 
+import javax.script.ScriptEngineManager;
 import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
@@ -42,16 +43,10 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeoutException;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipOutputStream;
-import javax.script.ScriptEngineManager;
 
 import com.google.common.collect.ImmutableMap;
 import org.apache.commons.io.FileUtils;
-import org.apache.http.HttpEntity;
 import org.apache.http.client.HttpClient;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.entity.ByteArrayEntity;
-import org.apache.http.message.BasicHeader;
-import org.apache.http.util.EntityUtils;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.SolrClient;
@@ -60,7 +55,6 @@ import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.BaseHttpSolrClient;
 import org.apache.solr.client.solrj.impl.CloudHttp2SolrClient;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.impl.Http2SolrClient;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.client.solrj.request.ConfigSetAdminRequest;
@@ -608,7 +602,7 @@ public class TestConfigSetsAPI extends SolrTestCaseJ4 {
         throw new AssertionError(e);
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
     } catch (ExecutionException e) {
       e.printStackTrace();
     } catch (TimeoutException e) {
diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkShardTermsTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkShardTermsTest.java
index 057b212..1f1b4b4 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ZkShardTermsTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ZkShardTermsTest.java
@@ -17,7 +17,6 @@
 
 package org.apache.solr.cloud;
 
-import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -32,17 +31,14 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Supplier;
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.cloud.ShardTerms;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.TimeSource;
 import org.apache.solr.util.TimeOut;
-import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -224,7 +220,7 @@ public class ZkShardTermsTest extends SolrCloudTestCase {
               Thread.sleep(LuceneTestCase.random().nextInt(TEST_NIGHTLY ? 200 : 50));
               zkShardTerms.setTermEqualsToLeader(replica);
             } catch (InterruptedException e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               e.printStackTrace();
             }
           }
diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java
index c099115..9ca2165 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java
@@ -17,25 +17,14 @@
 package org.apache.solr.cloud;
 
 import java.nio.file.Path;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.ParWork;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkCmdExecutor;
-import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.WatchedEvent;
-import org.apache.zookeeper.Watcher;
-import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
 
 public class ZkSolrClientTest extends SolrTestCaseJ4 {
 
@@ -179,7 +168,7 @@ public class ZkSolrClientTest extends SolrTestCaseJ4 {
           zkClient.mkdir("collections/collection4");
           break;
         } catch (KeeperException.SessionExpiredException | KeeperException.ConnectionLossException e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
         Thread.sleep(50 * i);
       }
diff --git a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
index 06aa48d..7b74047 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
@@ -24,20 +24,14 @@ import java.util.Map;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeoutException;
 
-import org.apache.http.HttpEntity;
 import org.apache.http.HttpResponse;
 import org.apache.http.client.HttpClient;
 import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.entity.ByteArrayEntity;
-import org.apache.http.util.EntityUtils;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudHttp2SolrClient;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.impl.Http2SolrClient;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.response.CollectionAdminResponse;
 import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
@@ -49,7 +43,6 @@ import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.util.RTimer;
 import org.apache.solr.util.SimplePostTool;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.noggit.JSONParser;
 import org.slf4j.Logger;
@@ -187,7 +180,7 @@ public class TestBlobHandler extends AbstractFullDistribZkTestBase {
         fail();
       }
     } catch (InterruptedException e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
     } catch (ExecutionException e) {
       e.printStackTrace();
     } catch (TimeoutException e) {
diff --git a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java
index 1b9f0d6..200e000 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java
@@ -27,15 +27,10 @@ import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.http.HttpEntity;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.util.EntityUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.impl.CloudHttp2SolrClient;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.impl.Http2SolrClient;
-import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
 import org.apache.solr.cloud.SolrCloudBridgeTestCase;
 import org.apache.solr.common.LinkedHashMapWriter;
 import org.apache.solr.common.MapWriter;
@@ -82,7 +77,7 @@ public class TestSolrConfigHandlerConcurrent extends SolrCloudBridgeTestCase {
               Thread.sleep(LuceneTestCase.random().nextInt(TEST_NIGHTLY ? 1000 : 50));
               invokeBulkCall((String) e.getKey(), errs, value);
             } catch (Exception e1) {
-              ParWork.propegateInterrupt(e1);
+              ParWork.propagateInterrupt(e1);
               return null;
             }
             return null;
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
index d3ebe87..629f01d 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
@@ -21,7 +21,6 @@ import javax.management.MBeanServer;
 import javax.management.MBeanServerFactory;
 import javax.management.ObjectInstance;
 import javax.management.ObjectName;
-
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
@@ -217,7 +216,7 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
               // no longer present
               break;
             } catch (Exception e) {
-              ParWork.propegateInterrupt(e);
+              ParWork.propagateInterrupt(e);
               fail("Unexpected error retrieving attribute: " + e.toString());
             }
           }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java
index 13a15d7..8b5ec1b 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java
@@ -16,13 +16,6 @@
  */
 package org.apache.solr.client.solrj;
 
-import org.apache.solr.common.MapWriter;
-import org.apache.solr.common.ParWork;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SuppressForbidden;
-
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -30,6 +23,13 @@ import java.io.ObjectInputStream;
 import java.io.ObjectOutputStream;
 import java.io.Serializable;
 
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.ParWork;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SuppressForbidden;
+
 
 /**
  * 
@@ -74,7 +74,7 @@ public abstract class SolrResponse implements Serializable, MapWriter {
       outputStream.writeObject(response);
       return byteStream.toByteArray();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, e);
     }
   }
@@ -87,7 +87,7 @@ public abstract class SolrResponse implements Serializable, MapWriter {
       ObjectInputStream inputStream = new ObjectInputStream(byteStream);
       return (SolrResponse) inputStream.readObject();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new SolrException(ErrorCode.SERVER_ERROR, e);
     }
   }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java b/solr/solrj/src/java/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java
index 813c766..a545058 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/beans/DocumentObjectBinder.java
@@ -16,20 +16,31 @@
  */
 package org.apache.solr.client.solrj.beans;
 
+import java.lang.reflect.AccessibleObject;
+import java.lang.reflect.Array;
+import java.lang.reflect.GenericArrayType;
+import java.lang.reflect.Method;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.nio.ByteBuffer;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.regex.Pattern;
+
 import org.apache.solr.common.ParWork;
-import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrDocument;
+import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.util.SuppressForbidden;
 
-import java.lang.reflect.*;
-import java.security.AccessController;
-import java.security.PrivilegedAction;
-import java.util.*;
-import java.util.regex.Pattern;
-import java.util.concurrent.ConcurrentHashMap;
-import java.nio.ByteBuffer;
-
 /**
  * A class to map objects to and from solr documents.
  *
@@ -69,7 +80,7 @@ public class DocumentObjectBinder {
       }
       return obj;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throw new BindingException("Could not instantiate object of " + clazz, e);
     }
   }
@@ -192,7 +203,7 @@ public class DocumentObjectBinder {
           try {
             getter = setter.getDeclaringClass().getMethod(gname, (Class[]) null);
           } catch (Exception ex) {
-            ParWork.propegateInterrupt(ex);
+            ParWork.propagateInterrupt(ex);
             // no getter -- don't worry about it...
             if (type == Boolean.class) {
               gname = "is" + setter.getName().substring(3);
@@ -456,7 +467,7 @@ public class DocumentObjectBinder {
         }
       }
       catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new BindingException("Exception while setting value : " + v + " on " + (field != null ? field : setter), e);
       }
     }
@@ -466,7 +477,7 @@ public class DocumentObjectBinder {
         try {
           return field.get(obj);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           throw new BindingException("Exception while getting value: " + field, e);
         }
       } else if (getter == null) {
@@ -476,7 +487,7 @@ public class DocumentObjectBinder {
       try {
         return getter.invoke(obj, (Object[]) null);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         throw new BindingException("Exception while getting value: " + getter, e);
       }
     }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/SocketProxy.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/SocketProxy.java
index a40ca38..b0d23b9 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/SocketProxy.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/SocketProxy.java
@@ -16,6 +16,8 @@
  */
 package org.apache.solr.client.solrj.cloud;
 
+import javax.net.ssl.SSLServerSocketFactory;
+import javax.net.ssl.SSLSocketFactory;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -33,9 +35,6 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 
-import javax.net.ssl.SSLServerSocketFactory;
-import javax.net.ssl.SSLSocketFactory;
-
 import org.apache.solr.common.ParWork;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -200,7 +199,7 @@ public class SocketProxy {
       serverSocket.bind(new InetSocketAddress(proxyUrl.getPort()));
       doOpen();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       if (log.isDebugEnabled()) {
         log.debug("exception on reopen url:{} ", getUrl(), e);
       }
@@ -242,7 +241,7 @@ public class SocketProxy {
     try {
       c.close();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.debug("exception on close of: {}", c, e);
     }
   }
@@ -251,7 +250,7 @@ public class SocketProxy {
     try {
       c.halfClose();
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       log.debug("exception on half close of: {}", c, e);
     }
   }
@@ -388,7 +387,7 @@ public class SocketProxy {
               out.write(buf, 0, len);
           }
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           if (log.isDebugEnabled()) {
             log.debug("read/write failed, reason: {}", e.getLocalizedMessage());
           }
@@ -399,14 +398,14 @@ public class SocketProxy {
               close();
             }
           } catch (Exception ignore) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
           }
         } finally {
           if (in != null) {
             try {
               in.close();
             } catch (Exception exc) {
-              ParWork.propegateInterrupt(exc);
+              ParWork.propagateInterrupt(exc);
               log.debug("Error when closing InputStream on socket: {}", src, exc);
             }
           }
@@ -414,7 +413,7 @@ public class SocketProxy {
             try {
               out.close();
             } catch (Exception exc) {
-              ParWork.propegateInterrupt(exc);
+              ParWork.propagateInterrupt(exc);
               log.debug("{} when closing OutputStream on socket: {}", exc, destination);
             }
           }
@@ -463,7 +462,7 @@ public class SocketProxy {
           } catch (SocketTimeoutException expected) {}
         }
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         if (log.isDebugEnabled()) {
           log.debug("acceptor: finished for reason: {}", e.getLocalizedMessage());
         }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/ZNodeName.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/ZNodeName.java
index 01f0251..7ac2611 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/ZNodeName.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/ZNodeName.java
@@ -67,7 +67,7 @@ public class ZNodeName implements Comparable<ZNodeName> {
         try {
             return Integer.parseInt(seq);
         } catch (Exception e) {
-            ParWork.propegateInterrupt(e);
+            ParWork.propagateInterrupt(e);
             LOG.warn("Number format exception for {}", seq, e);
             return null;
         }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AutoScalingConfig.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AutoScalingConfig.java
index 2c5c83f..b586d79 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AutoScalingConfig.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AutoScalingConfig.java
@@ -82,7 +82,7 @@ public class AutoScalingConfig implements MapWriter {
           TriggerEventProcessorStage stage = TriggerEventProcessorStage.valueOf(String.valueOf(stageName).toUpperCase(Locale.ROOT));
           stages.add(stage);
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
           log.warn("Invalid stage name '{}' for '{}' in listener config, skipping it in: {}",
               stageName, name, properties);
         }
@@ -174,7 +174,7 @@ public class AutoScalingConfig implements MapWriter {
         try {
           type = TriggerEventType.valueOf(event.toUpperCase(Locale.ROOT));
         } catch (Exception e) {
-          ParWork.propegateInterrupt(e);
+          ParWork.propagateInterrupt(e);
         }
         if (type == null) {
           this.event = TriggerEventType.INVALID;
@@ -320,7 +320,7 @@ public class AutoScalingConfig implements MapWriter {
       try {
         version = (Integer)jsonMap.get(AutoScalingParams.ZK_VERSION);
       } catch (Exception e) {
-        ParWork.propegateInterrupt(e);
+        ParWork.propagateInterrupt(e);
         // ignore
       }
     }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
index f50ad9d..ddda460 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Clause.java
@@ -397,7 +397,7 @@ public class Clause implements MapWriter, Comparable<Clause> {
     } catch (IllegalArgumentException iae) {
       throw iae;
     } catch (Exception e) {
-      ParWork.propegateInterrupt(e);
+      ParWork.propagateInterrupt(e);
       throwExp(m, " Invalid tag : {0} "+ e.getMessage(), s);
       return null;
     }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
index 56a4b74..c723550 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
... 3465 lines suppressed ...
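
The propagateInterrupt calls renamed throughout this commit follow the standard Java idiom of
restoring a thread's interrupt status when an InterruptedException (or an exception wrapping one)
is caught and then swallowed or rethrown as something else. The sketch below illustrates that
idiom only; the method name and the two-argument overload mirror the call sites in the diff, but
the body is an assumption, not the actual org.apache.solr.common.ParWork implementation.

    // Hypothetical sketch of the interrupt-propagation idiom seen at the renamed call sites.
    // Not the real ParWork code; shown only to illustrate why the catch blocks call it.
    public final class InterruptUtil {

      private InterruptUtil() {}

      // Restores the interrupt flag if the caught exception indicates an interrupt.
      public static void propagateInterrupt(Throwable t) {
        propagateInterrupt(t, false);
      }

      // The boolean mirrors the two-argument call seen in StartupLoggingUtils; its meaning
      // (assumed here: suppress logging) is a guess, since the real signature is not shown.
      public static void propagateInterrupt(Throwable t, boolean quiet) {
        Throwable cause = t;
        while (cause != null) {
          if (cause instanceof InterruptedException) {
            // Re-assert the interrupt status so code further up the stack can observe it.
            Thread.currentThread().interrupt();
            return;
          }
          cause = cause.getCause();
        }
      }
    }

Call sites such as "catch (InterruptedException e) { ParWork.propagateInterrupt(e); ... }" rely on
this pattern so that catching the exception does not silently clear the thread's interrupted state.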